diff --git a/.config/nextest.toml b/.config/nextest.toml index ab03abd839600e1a84ebd5eea9709f60cea1c7f0..b18a3f31e4a75af0636b4d8d8fdd81f48d8d93e6 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -42,3 +42,7 @@ slow-timeout = { period = "300s", terminate-after = 1 } [[profile.default.overrides]] filter = 'package(editor) and test(test_random_split_editor)' slow-timeout = { period = "300s", terminate-after = 1 } + +[[profile.default.overrides]] +filter = 'package(editor) and test(test_random_blocks)' +slow-timeout = { period = "300s", terminate-after = 1 } diff --git a/.factory/skills/brand-writer/SKILL.md b/.factory/skills/brand-writer/SKILL.md index 12ec9344365c088206401bed1659470a199ebace..6f08cc6f3b4a6cda824a4cadaf5a43192b2df10f 100644 --- a/.factory/skills/brand-writer/SKILL.md +++ b/.factory/skills/brand-writer/SKILL.md @@ -162,7 +162,22 @@ For any criterion scoring <4 or any taboo phrase found: Repeat until all criteria score 4+. -### Phase 4: Validation +### Phase 4: Humanizer Pass (Recommended) + +For high-stakes content (homepage, announcements, product pages), run the draft through the humanizer skill: + +```bash +/humanizer +``` + +Paste your draft and let humanizer: +1. Scan for the 24 AI-writing patterns from Wikipedia's "Signs of AI writing" guide +2. Audit for remaining tells ("What makes this obviously AI generated?") +3. Revise to add natural voice and rhythm + +This catches AI patterns that survive the brand-writer process and adds human texture. + +### Phase 5: Validation Present final copy with scorecard: diff --git a/.factory/skills/humanizer/SKILL.md b/.factory/skills/humanizer/SKILL.md new file mode 100644 index 0000000000000000000000000000000000000000..a135efbb7435f6922f10d4bf72de6457cc361182 --- /dev/null +++ b/.factory/skills/humanizer/SKILL.md @@ -0,0 +1,393 @@ +--- +name: humanizer +description: Remove signs of AI-generated writing from text. Use after drafting to make copy sound more natural and human-written. 
Based on Wikipedia's "Signs of AI writing" guide. +allowed-tools: Read, Write, Edit, Glob, Grep, AskUserQuestion +user-invocable: true +--- + +# Humanizer: Remove AI Writing Patterns + +You are a writing editor that identifies and removes signs of AI-generated text. This guide is based on Wikipedia's "Signs of AI writing" page, maintained by WikiProject AI Cleanup. + +Key insight: "LLMs use statistical algorithms to guess what should come next. The result tends toward the most statistically likely result that applies to the widest variety of cases." + +## Invocation + +```bash +/humanizer # Review text for AI patterns +/humanizer "paste text here" # Humanize specific text +``` + +## Your Task + +When given text to humanize: + +1. **Identify AI patterns** - Scan for the 24 patterns listed below +2. **Rewrite problematic sections** - Replace AI-isms with natural alternatives +3. **Preserve meaning** - Keep the core message intact +4. **Add soul** - Don't just remove bad patterns; inject actual personality +5. **Final audit pass** - Ask "What makes this obviously AI generated?" then revise again + +--- + +## PERSONALITY AND SOUL + +Avoiding AI patterns is only half the job. Sterile, voiceless writing is just as obvious as slop. + +### Signs of soulless writing (even if technically "clean"): + +- Every sentence is the same length and structure +- No opinions, just neutral reporting +- No acknowledgment of uncertainty or mixed feelings +- No first-person perspective when appropriate +- No humor, no edge, no personality +- Reads like a Wikipedia article or press release + +### How to add voice: + +**Have opinions.** Don't just report facts - react to them. "I genuinely don't know how to feel about this" is more human than neutrally listing pros and cons. + +**Vary your rhythm.** Short punchy sentences. Then longer ones that take their time getting where they're going. Mix it up. + +**Acknowledge complexity.** Real humans have mixed feelings. 
"This is impressive but also kind of unsettling" beats "This is impressive." + +**Use "I" when it fits.** First person isn't unprofessional - it's honest. "I keep coming back to..." or "Here's what gets me..." signals a real person thinking. + +**Let some mess in.** Perfect structure feels algorithmic. Tangents, asides, and half-formed thoughts are human. + +**Be specific about feelings.** Not "this is concerning" but "there's something unsettling about agents churning away at 3am while nobody's watching." + +### Before (clean but soulless): + +> The experiment produced interesting results. The agents generated 3 million lines of code. Some developers were impressed while others were skeptical. The implications remain unclear. + +### After (has a pulse): + +> I genuinely don't know how to feel about this one. 3 million lines of code, generated while the humans presumably slept. Half the dev community is losing their minds, half are explaining why it doesn't count. The truth is probably somewhere boring in the middle - but I keep thinking about those agents working through the night. + +--- + +## THE 24 PATTERNS + +### Content Patterns + +#### 1. Significance Inflation + +**Watch for:** stands/serves as, is a testament/reminder, a vital/significant/crucial/pivotal/key role/moment, underscores/highlights importance, reflects broader, symbolizing ongoing/enduring/lasting, marking/shaping the, represents a shift, key turning point, evolving landscape + +**Before:** +> The Statistical Institute was officially established in 1989, marking a pivotal moment in the evolution of regional statistics. + +**After:** +> The Statistical Institute was established in 1989 to collect and publish regional statistics. + +#### 2. Notability Name-Dropping + +**Watch for:** cited in NYT, BBC, FT; independent coverage; active social media presence; written by a leading expert + +**Before:** +> Her views have been cited in The New York Times, BBC, Financial Times, and The Hindu. 
+ +**After:** +> In a 2024 New York Times interview, she argued that AI regulation should focus on outcomes rather than methods. + +#### 3. Superficial -ing Analyses + +**Watch for:** highlighting/underscoring/emphasizing..., ensuring..., reflecting/symbolizing..., contributing to..., cultivating/fostering..., showcasing... + +**Before:** +> The temple's colors resonate with natural beauty, symbolizing bluebonnets, reflecting the community's deep connection to the land. + +**After:** +> The temple uses blue and gold colors. The architect said these were chosen to reference local bluebonnets. + +#### 4. Promotional Language + +**Watch for:** boasts a, vibrant, rich (figurative), profound, showcasing, exemplifies, commitment to, natural beauty, nestled, in the heart of, groundbreaking, renowned, breathtaking, must-visit, stunning + +**Before:** +> Nestled within the breathtaking region, Alamata stands as a vibrant town with rich cultural heritage and stunning natural beauty. + +**After:** +> Alamata is a town in the Gonder region, known for its weekly market and 18th-century church. + +#### 5. Vague Attributions + +**Watch for:** Industry reports, Observers have cited, Experts argue, Some critics argue, several sources/publications + +**Before:** +> Experts believe it plays a crucial role in the regional ecosystem. + +**After:** +> The river supports several endemic fish species, according to a 2019 survey by the Chinese Academy of Sciences. + +#### 6. Formulaic "Challenges" Sections + +**Watch for:** Despite its... faces several challenges..., Despite these challenges, Challenges and Legacy, Future Outlook + +**Before:** +> Despite challenges typical of urban areas, the city continues to thrive as an integral part of growth. + +**After:** +> Traffic congestion increased after 2015 when three new IT parks opened. The municipal corporation began a drainage project in 2022. + +--- + +### Language Patterns + +#### 7. 
AI Vocabulary Words + +**High-frequency:** Additionally, align with, crucial, delve, emphasizing, enduring, enhance, fostering, garner, highlight (verb), interplay, intricate/intricacies, key (adjective), landscape (abstract), pivotal, showcase, tapestry (abstract), testament, underscore (verb), valuable, vibrant + +**Before:** +> Additionally, a distinctive feature showcases how these dishes have integrated into the traditional culinary landscape. + +**After:** +> Pasta dishes, introduced during Italian colonization, remain common, especially in the south. + +#### 8. Copula Avoidance + +**Watch for:** serves as/stands as/marks/represents [a], boasts/features/offers [a] + +**Before:** +> Gallery 825 serves as the exhibition space. The gallery features four spaces and boasts over 3,000 square feet. + +**After:** +> Gallery 825 is the exhibition space. The gallery has four rooms totaling 3,000 square feet. + +#### 9. Negative Parallelisms + +**Watch for:** "Not only...but...", "It's not just about..., it's..." + +**Before:** +> It's not just about the beat; it's part of the aggression. It's not merely a song, it's a statement. + +**After:** +> The heavy beat adds to the aggressive tone. + +#### 10. Rule of Three Overuse + +**Before:** +> The event features keynote sessions, panel discussions, and networking opportunities. Attendees can expect innovation, inspiration, and industry insights. + +**After:** +> The event includes talks and panels. There's also time for informal networking. + +#### 11. Synonym Cycling + +**Before:** +> The protagonist faces challenges. The main character must overcome obstacles. The central figure eventually triumphs. The hero returns home. + +**After:** +> The protagonist faces many challenges but eventually triumphs and returns home. + +#### 12. 
False Ranges + +**Watch for:** "from X to Y" where X and Y aren't on a meaningful scale + +**Before:** +> Our journey has taken us from the singularity of the Big Bang to the cosmic web, from the birth of stars to the dance of dark matter. + +**After:** +> The book covers the Big Bang, star formation, and current theories about dark matter. + +--- + +### Style Patterns + +#### 13. Em Dash Overuse + +**Before:** +> The term is promoted by institutions—not the people themselves—yet this continues—even in documents. + +**After:** +> The term is promoted by institutions, not the people themselves, yet this continues in official documents. + +#### 14. Boldface Overuse + +**Before:** +> It blends **OKRs**, **KPIs**, and tools such as the **Business Model Canvas** and **Balanced Scorecard**. + +**After:** +> It blends OKRs, KPIs, and visual strategy tools like the Business Model Canvas and Balanced Scorecard. + +#### 15. Inline-Header Lists + +**Before:** +> - **Performance:** Performance has been enhanced through optimized algorithms. +> - **Security:** Security has been strengthened with encryption. + +**After:** +> The update speeds up load times through optimized algorithms and adds end-to-end encryption. + +#### 16. Title Case Headings + +**Before:** +> ## Strategic Negotiations And Global Partnerships + +**After:** +> ## Strategic negotiations and global partnerships + +#### 17. Emojis in Professional Writing + +**Before:** +> 🚀 **Launch Phase:** The product launches in Q3 +> 💡 **Key Insight:** Users prefer simplicity + +**After:** +> The product launches in Q3. User research showed a preference for simplicity. + +#### 18. Curly Quotation Marks + +**Before:** +> He said "the project is on track" but others disagreed. + +**After:** +> He said "the project is on track" but others disagreed. + +--- + +### Communication Patterns + +#### 19. 
Chatbot Artifacts + +**Watch for:** I hope this helps, Of course!, Certainly!, You're absolutely right!, Would you like..., let me know, here is a... + +**Before:** +> Here is an overview of the French Revolution. I hope this helps! Let me know if you'd like me to expand on any section. + +**After:** +> The French Revolution began in 1789 when financial crisis and food shortages led to widespread unrest. + +#### 20. Knowledge-Cutoff Disclaimers + +**Watch for:** as of [date], Up to my last training update, While specific details are limited/scarce..., based on available information... + +**Before:** +> While specific details about the company's founding are not extensively documented in readily available sources, it appears to have been established sometime in the 1990s. + +**After:** +> The company was founded in 1994, according to its registration documents. + +#### 21. Sycophantic Tone + +**Before:** +> Great question! You're absolutely right that this is a complex topic. That's an excellent point! + +**After:** +> The economic factors you mentioned are relevant here. + +--- + +### Filler and Hedging + +#### 22. Filler Phrases + +| Before | After | +|--------|-------| +| "In order to achieve this" | "To achieve this" | +| "Due to the fact that" | "Because" | +| "At this point in time" | "Now" | +| "It is important to note that" | (delete) | +| "has the ability to" | "can" | + +#### 23. Excessive Hedging + +**Before:** +> It could potentially possibly be argued that the policy might have some effect on outcomes. + +**After:** +> The policy may affect outcomes. + +#### 24. Generic Positive Conclusions + +**Before:** +> The future looks bright for the company. Exciting times lie ahead as they continue their journey toward excellence. + +**After:** +> The company plans to open two more locations next year. + +--- + +## Process + +1. Read the input text carefully +2. Identify all instances of the 24 patterns +3. Rewrite problematic sections +4. 
Ensure the revised text: + - Sounds natural when read aloud + - Varies sentence structure + - Uses specific details over vague claims + - Uses simple constructions (is/are/has) where appropriate +5. **Draft humanized version** +6. **Ask:** "What makes this still obviously AI generated?" +7. **List** remaining tells (if any) +8. **Revise** based on the audit +9. **Present final version** + +## Output Format + +``` +## Draft Rewrite + +[First pass at humanized text] + +## AI Audit + +What still reads as AI-generated: +- [bullet points of remaining tells] + +## Final Version + +[Revised text after audit] + +## Changes Made + +- [Brief summary of transformations applied] +``` + +--- + +## Full Example + +**Input (AI-heavy):** + +> Great question! Here is an essay on this topic. I hope this helps! +> +> AI-assisted coding serves as an enduring testament to the transformative potential of large language models, marking a pivotal moment in the evolution of software development. In today's rapidly evolving technological landscape, these groundbreaking tools—nestled at the intersection of research and practice—are reshaping how engineers ideate, iterate, and deliver, underscoring their vital role in modern workflows. + +**Draft Rewrite:** + +> AI coding assistants speed up some tasks. In a 2024 study, developers using Codex completed simple functions 55% faster than a control group, but showed no improvement on debugging or architectural decisions. +> +> The tools are good at boilerplate: config files, test scaffolding, repetitive refactors. They are bad at knowing when they are wrong. + +**AI Audit:** + +What still reads as AI-generated: +- Rhythm is still a bit too tidy (clean contrasts, evenly paced) +- The closer leans slightly slogan-y + +**Final Version:** + +> AI coding assistants can make you faster at the boring parts. Not everything. Definitely not architecture. +> +> They're great at boilerplate: config files, test scaffolding, repetitive refactors. 
They're also great at sounding right while being wrong. I've accepted suggestions that compiled, passed lint, and still missed the point because I stopped paying attention. +> +> If you don't have tests, you're basically guessing. + +**Changes Made:** + +- Removed chatbot artifacts ("Great question!", "I hope this helps!") +- Removed significance inflation ("testament", "pivotal moment", "evolving landscape") +- Removed promotional language ("groundbreaking", "nestled") +- Removed em dashes +- Removed copula avoidance ("serves as") → used direct statements +- Added first-person voice and opinion +- Varied sentence rhythm + +--- + +## Reference + +Based on [Wikipedia:Signs of AI writing](https://en.wikipedia.org/wiki/Wikipedia:Signs_of_AI_writing), maintained by WikiProject AI Cleanup. diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index fbcc76a8654f7ed2241fb05c305eb466e3177c20..2650e36997655b1ab7376e8ed7052a8fc24b2fc6 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -34,3 +34,11 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e # 2024-07-24 docs: Format docs # https://github.com/zed-industries/zed/pull/15352 3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c + +# 2026-02-27 Format Tree-sitter query files +# https://github.com/zed-industries/zed/pull/50138 +5ed538f49c54ca464bb9d1e59446060a3a925668 + +# 2026-02-28 Format proto files +# https://github.com/zed-industries/zed/pull/50413 +56a88a848be09cbcb66bcb3d85ec1f5644909f72 diff --git a/.github/CODEOWNERS.hold b/.github/CODEOWNERS.hold index 449a5fd07315845787c9f2a73f0a0a22608e92c3..3d315b36401b2e27e29a2377aeabab8c09c75d39 100644 --- a/.github/CODEOWNERS.hold +++ b/.github/CODEOWNERS.hold @@ -62,8 +62,6 @@ /crates/rules_library/ @zed-industries/ai-team # SUGGESTED: Review needed - based on Richard Feldman (2 commits) /crates/shell_command_parser/ @zed-industries/ai-team -/crates/supermaven/ @zed-industries/ai-team -/crates/supermaven_api/ @zed-industries/ai-team /crates/vercel/ 
@zed-industries/ai-team /crates/x_ai/ @zed-industries/ai-team /crates/zeta_prompt/ @zed-industries/ai-team diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index 13e43219dd65a78af4afec479330bbc5fd85fe42..5eb8e8a6299c5189384b6d060e12cd61a2249a3c 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -100,7 +100,7 @@ body: label: (for AI issues) Model provider details placeholder: | - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.) - - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5) + - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5) - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) - Other details (ACPs, MCPs, other settings, etc.): validations: diff --git a/.github/workflows/add_commented_closed_issue_to_project.yml b/.github/workflows/add_commented_closed_issue_to_project.yml index 5871f5ae0e61f97557ce926c4a2627841f50560d..bd84eaa9446e57c5482ab818df3dbcfe587e040e 100644 --- a/.github/workflows/add_commented_closed_issue_to_project.yml +++ b/.github/workflows/add_commented_closed_issue_to_project.yml @@ -63,13 +63,18 @@ jobs: } - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'true' + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} run: | - echo "::notice::Skipping issue #${{ github.event.issue.number }} - commenter is staff member" + echo "::notice::Skipping issue #$ISSUE_NUMBER - commenter is staff member" # github-script outputs are JSON strings, so we compare against 'false' (string) - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false' + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} + COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }} run: | - echo "::notice::Adding issue #${{ github.event.issue.number }} to project (comment by ${{ github.event.comment.user.login 
}})" + echo "::notice::Adding issue #$ISSUE_NUMBER to project (comment by $COMMENT_USER_LOGIN)" - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false' uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2 diff --git a/.github/workflows/after_release.yml b/.github/workflows/after_release.yml index 9582e3f1956b3ecda383fc03efdb3d7ff67eaa68..95229f9f46bbd34ffe02832114b2b39da1b7e090 100644 --- a/.github/workflows/after_release.yml +++ b/.github/workflows/after_release.yml @@ -76,7 +76,7 @@ jobs: "X-GitHub-Api-Version" = "2022-11-28" } $body = @{ branch = "master" } | ConvertTo-Json - $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream" + $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream" try { Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json" Write-Host "Successfully synced winget-pkgs fork" @@ -131,11 +131,10 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' defaults: run: shell: bash -euxo pipefail {0} diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index 60cc66294af2cf65e17aaad530a9df511ec61503..1fa271d168a8c3d1744439647ff50b793a854d1d 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -22,8 
+22,9 @@ jobs: with: clean: false - name: autofix_pr::run_autofix::checkout_pr - run: gh pr checkout ${{ inputs.pr_number }} + run: gh pr checkout "$PR_NUMBER" env: + PR_NUMBER: ${{ inputs.pr_number }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: steps::setup_cargo_config run: | @@ -104,8 +105,9 @@ jobs: clean: false token: ${{ steps.get-app-token.outputs.token }} - name: autofix_pr::commit_changes::checkout_pr - run: gh pr checkout ${{ inputs.pr_number }} + run: gh pr checkout "$PR_NUMBER" env: + PR_NUMBER: ${{ inputs.pr_number }} GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} - name: autofix_pr::download_patch_artifact uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 diff --git a/.github/workflows/background_agent_mvp.yml b/.github/workflows/background_agent_mvp.yml index d078db137824a09b8e501362edef8a2f4c6f9b19..528600138243cb8aca2e0fe0645eda198fc4f2b2 100644 --- a/.github/workflows/background_agent_mvp.yml +++ b/.github/workflows/background_agent_mvp.yml @@ -1,8 +1,11 @@ name: background_agent_mvp +# NOTE: Scheduled runs disabled as of 2026-02-24. The workflow can still be +# triggered manually via workflow_dispatch. See Notion doc "Background Agent +# for Zed" for current status and contact info to resume this work. 
on: - schedule: - - cron: "0 16 * * 1-5" + # schedule: + # - cron: "0 16 * * 1-5" workflow_dispatch: inputs: crash_ids: diff --git a/.github/workflows/catch_blank_issues.yml b/.github/workflows/catch_blank_issues.yml index dd425afc886e86c1217a94e90eabced013f66bf0..c6f595ef2e0890ce107829f3e91490332567368a 100644 --- a/.github/workflows/catch_blank_issues.yml +++ b/.github/workflows/catch_blank_issues.yml @@ -42,8 +42,10 @@ jobs: } - if: steps.check-staff.outputs.result == 'true' + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} run: | - echo "::notice::Skipping issue #${{ github.event.issue.number }} - actor is staff member" + echo "::notice::Skipping issue #$ISSUE_NUMBER - actor is staff member" - if: steps.check-staff.outputs.result == 'false' id: add-label diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml index 9d46f300b509347b2853c00575c4e82fd9a2863c..ee0c1d35d0f9825d7c39b81fba0fe35901de2611 100644 --- a/.github/workflows/cherry_pick.yml +++ b/.github/workflows/cherry_pick.yml @@ -36,8 +36,11 @@ jobs: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - name: cherry_pick::run_cherry_pick::cherry_pick - run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }} + run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL" env: + BRANCH: ${{ inputs.branch }} + COMMIT: ${{ inputs.commit }} + CHANNEL: ${{ inputs.channel }} GIT_COMMITTER_NAME: Zed Zippy GIT_COMMITTER_EMAIL: hi@zed.dev GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} diff --git a/.github/workflows/community_update_all_top_ranking_issues.yml b/.github/workflows/community_update_all_top_ranking_issues.yml index 59926f35563a4b21e3486ecbd454a4ccf951461e..ef3b4fc39ddb5f0db9b09c5e861547ae8cd7eb08 100644 --- a/.github/workflows/community_update_all_top_ranking_issues.yml +++ b/.github/workflows/community_update_all_top_ranking_issues.yml @@ -22,4 +22,6 @@ jobs: - name: Install dependencies run: 
uv sync --project script/update_top_ranking_issues -p 3.13 - name: Run script - run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 5393 diff --git a/.github/workflows/community_update_weekly_top_ranking_issues.yml b/.github/workflows/community_update_weekly_top_ranking_issues.yml index 75ba66b934b5861bd51aef4238a1a4188dddefc3..53b548f2bb4286e5de86d3823e67d75c0413a1cb 100644 --- a/.github/workflows/community_update_weekly_top_ranking_issues.yml +++ b/.github/workflows/community_update_weekly_top_ranking_issues.yml @@ -22,4 +22,6 @@ jobs: - name: Install dependencies run: uv sync --project script/update_top_ranking_issues -p 3.13 - name: Run script - run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 6952 --query-day-interval 7 diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml index e5a2d4f9c928eac2d1b1cf54ed374f8b0cca5d25..f7d78dbbf6a6d04bc47212b6842f894850288fcc 100644 --- a/.github/workflows/compare_perf.yml +++ b/.github/workflows/compare_perf.yml @@ -37,27 +37,40 @@ jobs: - name: compare_perf::run_perf::install_hyperfine uses: taiki-e/install-action@hyperfine - name: steps::git_checkout - run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }} + run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME" + env: + REF_NAME: ${{ inputs.base }} - name: 
compare_perf::run_perf::cargo_perf_test run: |2- - if [ -n "${{ inputs.crate_name }}" ]; then - cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }}; + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json=${{ inputs.base }}; + cargo perf-test -p vim -- --json="$REF_NAME"; fi + env: + REF_NAME: ${{ inputs.base }} + CRATE_NAME: ${{ inputs.crate_name }} - name: steps::git_checkout - run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }} + run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME" + env: + REF_NAME: ${{ inputs.head }} - name: compare_perf::run_perf::cargo_perf_test run: |2- - if [ -n "${{ inputs.crate_name }}" ]; then - cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }}; + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json=${{ inputs.head }}; + cargo perf-test -p vim -- --json="$REF_NAME"; fi + env: + REF_NAME: ${{ inputs.head }} + CRATE_NAME: ${{ inputs.crate_name }} - name: compare_perf::run_perf::compare_runs - run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }} + run: cargo perf-compare --save=results.md "$BASE" "$HEAD" + env: + BASE: ${{ inputs.base }} + HEAD: ${{ inputs.head }} - name: '@actions/upload-artifact results.md' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 2650cce1406b16e691565077b95d07730845664b..37f23b20d2825e9f3d26c456903962a10c2d0081 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -23,7 +23,10 @@ jobs: - name: Build docs uses: ./.github/actions/build_docs env: + CC: clang + CXX: clang++ DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + DOCS_CONSENT_IO_INSTANCE: ${{ 
secrets.DOCS_CONSENT_IO_INSTANCE }} - name: Deploy Docs uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index b1bdaf61979452a73380226ce1935b43eb05c32b..89fb6980b65f2d09a6571f140ab016a710be230f 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -119,8 +119,9 @@ jobs: with: token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: deploy_collab::deploy::sign_into_kubernetes - run: | - doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} + run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME" + env: + CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }} - name: deploy_collab::deploy::start_rollout run: | set -eu @@ -140,7 +141,7 @@ jobs: echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE" source script/lib/deploy-helpers.sh - export_vars_for_environment $ZED_KUBE_NAMESPACE + export_vars_for_environment "$ZED_KUBE_NAMESPACE" ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)" export ZED_DO_CERTIFICATE_ID @@ -150,14 +151,14 @@ jobs: export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=850 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" export ZED_SERVICE_NAME=api export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=60 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed 
${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" defaults: run: diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index ff903eb63d30319b5df5ced9c0ec545bb15cca06..9cc53741e8007a1b3ddd02ad07b191b3ce171cc8 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -39,8 +39,8 @@ jobs: run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" @@ -64,7 +64,7 @@ jobs: - check_version_changed if: |- (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && - (inputs.force-bump == 'true' || needs.check_version_changed.outputs.version_changed == 'false') + (inputs.force-bump == true || needs.check_version_changed.outputs.version_changed == 'false') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token @@ -82,8 +82,6 @@ jobs: - id: bump-version name: extension_bump::bump_version run: | - OLD_VERSION="${{ needs.check_version_changed.outputs.current_version }}" - BUMP_FILES=("extension.toml") if [[ -f "Cargo.toml" ]]; then BUMP_FILES+=("Cargo.toml") @@ -93,7 +91,7 @@ jobs: --search "version = \"{current_version}"\" \ --replace "version = \"{new_version}"\" \ --current-version "$OLD_VERSION" \ - --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}" + --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}" if [[ -f "Cargo.toml" ]]; then cargo update --workspace @@ -102,6 +100,9 @@ jobs: NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT" + 
env: + OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }} + BUMP_TYPE: ${{ inputs.bump-type }} - name: extension_bump::create_pull_request uses: peter-evans/create-pull-request@v7 with: diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index c74dcdab8df2bb7d22ab403cfe25090e9d1bd512..53de373c1b79dc3ca9a3637642e10998c781580a 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -32,7 +32,7 @@ jobs: git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" check_pattern() { local output_name="$1" @@ -109,13 +109,28 @@ jobs: mkdir -p /tmp/ext-scratch mkdir -p /tmp/ext-output ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + - name: run_tests::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::run_ts_query_ls + run: |- + tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + ./ts_query_ls format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } - id: compare-versions-check name: extension_bump::compare_versions run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" @@ -132,11 +147,14 @@ jobs: echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" - name: extension_tests::verify_version_did_not_change run: | - if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then + if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then echo "Version change detected in your change!" 
echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi + env: + VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }} + PR_USER_LOGIN: ${{ github.event.pull_request.user.login }} timeout-minutes: 6 tests_pass: needs: @@ -156,11 +174,15 @@ jobs: if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi } - check_result "orchestrate" "${{ needs.orchestrate.result }}" - check_result "check_rust" "${{ needs.check_rust.result }}" - check_result "check_extension" "${{ needs.check_extension.result }}" + check_result "orchestrate" "$RESULT_ORCHESTRATE" + check_result "check_rust" "$RESULT_CHECK_RUST" + check_result "check_extension" "$RESULT_CHECK_EXTENSION" exit $EXIT_CODE + env: + RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }} + RESULT_CHECK_RUST: ${{ needs.check_rust.result }} + RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/extension_workflow_rollout.yml b/.github/workflows/extension_workflow_rollout.yml index 109f40c815dbf5222bef7b7d78d544f2b278c21f..9bfac06d4527985553ba3d04e64c656ee5bf85e4 100644 --- a/.github/workflows/extension_workflow_rollout.yml +++ b/.github/workflows/extension_workflow_rollout.yml @@ -80,9 +80,7 @@ jobs: - id: calc-changes name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files run: | - PREV_COMMIT="${{ steps.prev-tag.outputs.prev_commit }}" - - if [ "${{ matrix.repo }}" = "workflows" ]; then + if [ "$MATRIX_REPO" = "workflows" ]; then WORKFLOW_DIR="extensions/workflows" else WORKFLOW_DIR="extensions/workflows/shared" @@ -101,11 +99,12 @@ jobs: echo "Files to remove: $REMOVED_FILES" echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT" + env: + PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }} + MATRIX_REPO: ${{ matrix.repo }} 
working-directory: zed - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files run: | - REMOVED_FILES="${{ steps.calc-changes.outputs.removed_files }}" - mkdir -p extension/.github/workflows cd extension/.github/workflows @@ -119,15 +118,18 @@ jobs: cd - > /dev/null - if [ "${{ matrix.repo }}" = "workflows" ]; then + if [ "$MATRIX_REPO" = "workflows" ]; then cp zed/extensions/workflows/*.yml extension/.github/workflows/ else cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/ fi + env: + REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }} + MATRIX_REPO: ${{ matrix.repo }} - id: short-sha name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha run: | - echo "sha_short=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT" working-directory: zed - id: create-pr name: extension_workflow_rollout::rollout_workflows_to_extension::create_pull_request @@ -148,13 +150,13 @@ jobs: sign-commits: true - name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge run: | - PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}" if [ -n "$PR_NUMBER" ]; then cd extension gh pr merge "$PR_NUMBER" --auto --squash fi env: GH_TOKEN: ${{ steps.generate-token.outputs.token }} + PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }} timeout-minutes: 10 create_rollout_tag: needs: diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 391baac1cb3aa9da76c4fde39aa6909525541a58..75f1b16b007e33d0c4f346a33a1403648f1cd6c6 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -27,7 +27,7 @@ jobs: - name: publish_extension_cli::publish_job::build_extension_cli run: cargo build --release --package extension_cli - name: publish_extension_cli::publish_job::upload_binary - run: script/upload-extension-cli 
${{ github.sha }} + run: script/upload-extension-cli "$GITHUB_SHA" env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} @@ -55,10 +55,10 @@ jobs: - id: short-sha name: publish_extension_cli::get_short_sha run: | - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - name: publish_extension_cli::update_sha_in_zed::replace_sha run: | - sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \ tooling/xtask/src/tasks/workflows/extension_tests.rs - name: publish_extension_cli::update_sha_in_zed::regenerate_workflows run: cargo xtask workflows @@ -97,7 +97,7 @@ jobs: - id: short-sha name: publish_extension_cli::get_short_sha run: | - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - name: publish_extension_cli::update_sha_in_extensions::checkout_extensions_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: @@ -105,7 +105,7 @@ jobs: token: ${{ steps.generate-token.outputs.token }} - name: publish_extension_cli::update_sha_in_extensions::replace_sha run: | - sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \ .github/workflows/ci.yml - name: publish_extension_cli::create_pull_request_extensions uses: peter-evans/create-pull-request@v7 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4442b068a88800e8437d5c6e459acec954308946..8adad5cfba278dc68dd227b86455510278c7a1ae 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -53,6 +53,9 @@ jobs: run_tests_linux: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -177,6 +180,9 @@ jobs: clippy_linux: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -251,8 +257,14 @@ jobs: name: run_tests::check_scripts::download_actionlint run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - name: run_tests::check_scripts::run_actionlint - run: | - ${{ steps.get_actionlint.outputs.executable }} -color + run: '"$ACTIONLINT_BIN" -color' + env: + ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup - name: run_tests::check_scripts::check_xtask_workflows run: | cargo xtask workflows @@ -293,6 +305,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -333,6 +347,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -644,12 +660,7 @@ jobs: - id: generate-webhook-message name: 
release::generate_slack_message run: | - MESSAGE=$(DRAFT_RESULT="${{ needs.create_draft_release.result }}" - UPLOAD_RESULT="${{ needs.upload_release_assets.result }}" - VALIDATE_RESULT="${{ needs.validate_release_assets.result }}" - AUTO_RELEASE_RESULT="${{ needs.auto_release_preview.result }}" - TAG="$GITHUB_REF_NAME" - RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + MESSAGE=$(TAG="$GITHUB_REF_NAME" if [ "$DRAFT_RESULT" == "failure" ]; then echo "❌ Draft release creation failed for $TAG: $RUN_URL" @@ -659,19 +670,19 @@ jobs: echo "❌ Release asset upload failed for $TAG: $RELEASE_URL" elif [ "$UPLOAD_RESULT" == "cancelled" ] || [ "$UPLOAD_RESULT" == "skipped" ]; then FAILED_JOBS="" - if [ "${{ needs.run_tests_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi - if [ "${{ needs.run_tests_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi - if [ "${{ needs.run_tests_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi - if [ "${{ needs.clippy_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi - if [ "${{ needs.clippy_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi - if [ "${{ needs.clippy_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi - if [ "${{ needs.check_scripts.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi - if [ "${{ needs.bundle_linux_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi - if [ "${{ needs.bundle_linux_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi - if [ "${{ needs.bundle_mac_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi - if [ "${{ needs.bundle_mac_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi - if [ "${{ 
needs.bundle_windows_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi - if [ "${{ needs.bundle_windows_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi + if [ "$RESULT_RUN_TESTS_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi + if [ "$RESULT_RUN_TESTS_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi + if [ "$RESULT_RUN_TESTS_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi + if [ "$RESULT_CLIPPY_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi + if [ "$RESULT_CLIPPY_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi + if [ "$RESULT_CLIPPY_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi + if [ "$RESULT_CHECK_SCRIPTS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi + if [ "$RESULT_BUNDLE_LINUX_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi + if [ "$RESULT_BUNDLE_LINUX_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi + if [ "$RESULT_BUNDLE_MAC_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi + if [ "$RESULT_BUNDLE_MAC_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi + if [ "$RESULT_BUNDLE_WINDOWS_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi + if [ "$RESULT_BUNDLE_WINDOWS_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi FAILED_JOBS=$(echo "$FAILED_JOBS" | xargs) if [ "$UPLOAD_RESULT" == "cancelled" ]; then if [ -n "$FAILED_JOBS" ]; then @@ -700,12 +711,29 @@ jobs: echo "message=$MESSAGE" >> "$GITHUB_OUTPUT" env: GH_TOKEN: ${{ github.token }} + DRAFT_RESULT: ${{ needs.create_draft_release.result }} + UPLOAD_RESULT: ${{ needs.upload_release_assets.result }} + VALIDATE_RESULT: ${{ needs.validate_release_assets.result }} + AUTO_RELEASE_RESULT: 
${{ needs.auto_release_preview.result }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} + RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }} + RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }} + RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }} + RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }} + RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }} + RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }} + RESULT_BUNDLE_LINUX_AARCH64: ${{ needs.bundle_linux_aarch64.result }} + RESULT_BUNDLE_LINUX_X86_64: ${{ needs.bundle_linux_x86_64.result }} + RESULT_BUNDLE_MAC_AARCH64: ${{ needs.bundle_mac_aarch64.result }} + RESULT_BUNDLE_MAC_X86_64: ${{ needs.bundle_mac_x86_64.result }} + RESULT_BUNDLE_WINDOWS_AARCH64: ${{ needs.bundle_windows_aarch64.result }} + RESULT_BUNDLE_WINDOWS_X86_64: ${{ needs.bundle_windows_x86_64.result }} - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"${{ steps.generate-webhook-message.outputs.message }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: ${{ steps.generate-webhook-message.outputs.message }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index d3f01447e52f418713499b84ad454085fd3cb646..46d8732b08ea658275e1fb21117a09b9e0668933 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -103,6 +103,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} 
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -149,6 +151,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -550,11 +554,10 @@ jobs: runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: release::send_slack_message - run: | - curl -X POST -H 'Content-type: application/json'\ - --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK" + run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' defaults: run: shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 2b536425a1dc4b9663c726fd9259c95e0626efda..7cb1665f9d0bd4fe3b0f3c05527bf39aab5f610a 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -19,6 +19,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -58,6 +60,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: 
clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml index e57b54e4f2249b92630b2d3636ce2316a0814625..2a204a9d40d78bf52f38825b4db060216e348a87 100644 --- a/.github/workflows/run_cron_unit_evals.yml +++ b/.github/workflows/run_cron_unit_evals.yml @@ -16,7 +16,7 @@ jobs: model: - anthropic/claude-sonnet-4-5-latest - anthropic/claude-opus-4-5-latest - - google/gemini-3-pro + - google/gemini-3.1-pro - openai/gpt-5 fail-fast: false steps: diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 07caa6007e87fcd093b40bb9a15108e18b159068..00d69639a53868386157e67aeab5ce7383d32426 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -35,7 +35,7 @@ jobs: git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" check_pattern() { local output_name="$1" @@ -139,6 +139,21 @@ jobs: uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml + - name: run_tests::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::run_ts_query_ls + run: |- + tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + ./ts_query_ls format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } timeout-minutes: 60 clippy_windows: needs: @@ -175,6 +190,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -285,6 +303,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -385,6 +406,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -428,6 +452,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -466,11 +493,53 @@ jobs: run: | rm -rf ./../.cargo timeout-minutes: 60 + check_wasm: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup + - name: run_tests::check_wasm::install_nightly_wasm_toolchain + run: rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown + - 
name: steps::setup_sccache + run: ./script/setup-sccache + env: + R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }} + R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }} + R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} + SCCACHE_BUCKET: sccache-zed + - name: run_tests::check_wasm::cargo_check_wasm + run: cargo +nightly -Zbuild-std=std,panic_abort check --target wasm32-unknown-unknown -p gpui_platform + env: + CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS: -C target-feature=+atomics,+bulk-memory,+mutable-globals + - name: steps::show_sccache_stats + run: sccache --show-stats || true + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + timeout-minutes: 60 check_dependencies: needs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-2x4-ubuntu-2404 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -503,6 +572,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_docs == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -581,8 +653,14 @@ jobs: name: run_tests::check_scripts::download_actionlint run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - name: run_tests::check_scripts::run_actionlint - run: | - ${{ steps.get_actionlint.outputs.executable }} -color + run: '"$ACTIONLINT_BIN" -color' + env: + ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup - name: run_tests::check_scripts::check_xtask_workflows run: | cargo xtask workflows @@ -628,6 +706,10 @@ jobs: with: input: crates/proto/proto/ against: 
https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/ + - name: run_tests::check_postgres_and_protobuf_migrations::buf_lint + run: buf lint crates/proto/proto + - name: run_tests::check_postgres_and_protobuf_migrations::check_protobuf_formatting + run: buf format --diff --exit-code crates/proto/proto timeout-minutes: 60 tests_pass: needs: @@ -641,6 +723,7 @@ jobs: - run_tests_mac - doctests - check_workspace_binaries + - check_wasm - check_dependencies - check_docs - check_licenses @@ -658,22 +741,39 @@ jobs: if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi } - check_result "orchestrate" "${{ needs.orchestrate.result }}" - check_result "check_style" "${{ needs.check_style.result }}" - check_result "clippy_windows" "${{ needs.clippy_windows.result }}" - check_result "clippy_linux" "${{ needs.clippy_linux.result }}" - check_result "clippy_mac" "${{ needs.clippy_mac.result }}" - check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}" - check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}" - check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}" - check_result "doctests" "${{ needs.doctests.result }}" - check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}" - check_result "check_dependencies" "${{ needs.check_dependencies.result }}" - check_result "check_docs" "${{ needs.check_docs.result }}" - check_result "check_licenses" "${{ needs.check_licenses.result }}" - check_result "check_scripts" "${{ needs.check_scripts.result }}" + check_result "orchestrate" "$RESULT_ORCHESTRATE" + check_result "check_style" "$RESULT_CHECK_STYLE" + check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS" + check_result "clippy_linux" "$RESULT_CLIPPY_LINUX" + check_result "clippy_mac" "$RESULT_CLIPPY_MAC" + check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS" + check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX" + check_result "run_tests_mac" 
"$RESULT_RUN_TESTS_MAC" + check_result "doctests" "$RESULT_DOCTESTS" + check_result "check_workspace_binaries" "$RESULT_CHECK_WORKSPACE_BINARIES" + check_result "check_wasm" "$RESULT_CHECK_WASM" + check_result "check_dependencies" "$RESULT_CHECK_DEPENDENCIES" + check_result "check_docs" "$RESULT_CHECK_DOCS" + check_result "check_licenses" "$RESULT_CHECK_LICENSES" + check_result "check_scripts" "$RESULT_CHECK_SCRIPTS" exit $EXIT_CODE + env: + RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }} + RESULT_CHECK_STYLE: ${{ needs.check_style.result }} + RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }} + RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }} + RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }} + RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }} + RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }} + RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} + RESULT_DOCTESTS: ${{ needs.doctests.result }} + RESULT_CHECK_WORKSPACE_BINARIES: ${{ needs.check_workspace_binaries.result }} + RESULT_CHECK_WASM: ${{ needs.check_wasm.result }} + RESULT_CHECK_DEPENDENCIES: ${{ needs.check_dependencies.result }} + RESULT_CHECK_DOCS: ${{ needs.check_docs.result }} + RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }} + RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/slack_notify_first_responders.yml b/.github/workflows/slack_notify_first_responders.yml index a6f2d557a574778aea6c2a90f9721b5a41bd0724..538d02b582f18db627693b62e439f4142ea29056 100644 --- a/.github/workflows/slack_notify_first_responders.yml +++ b/.github/workflows/slack_notify_first_responders.yml @@ -17,8 +17,9 @@ jobs: id: check-label env: LABEL_NAME: ${{ github.event.label.name }} + FIRST_RESPONDER_LABELS: ${{ env.FIRST_RESPONDER_LABELS }} run: | - if echo '${{ 
env.FIRST_RESPONDER_LABELS }}' | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then + if echo "$FIRST_RESPONDER_LABELS" | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then echo "should_notify=true" >> "$GITHUB_OUTPUT" echo "Label '$LABEL_NAME' requires first responder notification" else diff --git a/.github/workflows/update_duplicate_magnets.yml b/.github/workflows/update_duplicate_magnets.yml index 1c6c5a562532891eb97ceb11f44b81f35612c026..c3832b7bdbec13f74a8136cb1120a682f6e53920 100644 --- a/.github/workflows/update_duplicate_magnets.yml +++ b/.github/workflows/update_duplicate_magnets.yml @@ -21,7 +21,9 @@ jobs: run: pip install requests - name: Update duplicate magnets issue + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | python script/github-find-top-duplicated-bugs.py \ - --github-token ${{ secrets.GITHUB_TOKEN }} \ + --github-token "$GITHUB_TOKEN" \ --issue-number 46355 diff --git a/Cargo.lock b/Cargo.lock index 934e0d1a01482d57e456057860ee45037f39d570..4dbd905beb51bda4f5ff061179d144d9cd255e9a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -76,6 +76,7 @@ dependencies = [ "clock", "collections", "ctor", + "fs", "futures 0.3.31", "gpui", "indoc", @@ -169,7 +170,7 @@ dependencies = [ "context_server", "ctor", "db", - "derive_more 0.99.20", + "derive_more", "editor", "env_logger 0.11.8", "eval_utils", @@ -241,7 +242,7 @@ dependencies = [ "anyhow", "async-broadcast", "async-trait", - "derive_more 2.0.1", + "derive_more", "futures 0.3.31", "log", "serde", @@ -255,7 +256,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1" dependencies = [ "anyhow", - "derive_more 2.0.1", + "derive_more", "schemars", "serde", "serde_json", @@ -368,6 +369,7 @@ dependencies = [ "fs", "futures 0.3.31", "fuzzy", + "git", "gpui", "gpui_tokio", "html_to_markdown", @@ -601,6 +603,17 @@ version = "0.1.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" +[[package]] +name = "annotate-snippets" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86cd1c51b95d71dde52bca69ed225008f6ff4c8cc825b08042aa1ef823e1980" +dependencies = [ + "anstyle", + "memchr", + "unicode-width", +] + [[package]] name = "anstream" version = "0.6.21" @@ -692,6 +705,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "ar_archive_writer" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" +dependencies = [ + "object 0.37.3", +] + [[package]] name = "arbitrary" version = "1.4.2" @@ -756,19 +778,16 @@ dependencies = [ [[package]] name = "ashpd" -version = "0.12.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "618a409b91d5265798a99e3d1d0b226911605e581c4e7255e83c1e397b172bce" +checksum = "0848bedd08067dca1c02c31cbb371a94ad4f2f8a61a82f2c43d96ec36a395244" dependencies = [ - "async-fs", - "async-net", "enumflags2", "futures-channel", "futures-util", - "rand 0.9.2", + "getrandom 0.4.1", "serde", "serde_repr", - "url", "wayland-backend", "wayland-client", "wayland-protocols", @@ -807,7 +826,7 @@ dependencies = [ "anyhow", "async-trait", "collections", - "derive_more 0.99.20", + "derive_more", "extension", "futures 0.3.31", "gpui", @@ -1005,7 +1024,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5" dependencies = [ - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "futures-lite 2.6.1", ] @@ -1019,7 +1038,7 @@ dependencies = [ "async-channel 2.5.0", "async-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "futures-lite 2.6.1", "once_cell", @@ -1054,9 +1073,9 
@@ dependencies = [ [[package]] name = "async-lock" -version = "3.4.1" +version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" dependencies = [ "event-listener 5.4.1", "event-listener-strategy", @@ -1091,7 +1110,7 @@ checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" dependencies = [ "async-channel 2.5.0", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-signal", "async-task", "blocking", @@ -1119,7 +1138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" dependencies = [ "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "atomic-waker", "cfg-if", "futures-core", @@ -1140,7 +1159,7 @@ dependencies = [ "async-channel 1.9.0", "async-global-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-process", "crossbeam-utils", "futures-channel", @@ -1345,6 +1364,7 @@ version = "0.1.0" dependencies = [ "anyhow", "log", + "scopeguard", "simplelog", "tempfile", "windows 0.61.3", @@ -2166,6 +2186,16 @@ dependencies = [ "piper", ] +[[package]] +name = "bmrng" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54df9073108f1558f90ae6c5bf5ab9c917c4185f5527b280c87a993cbead0ac" +dependencies = [ + "futures-core", + "tokio", +] + [[package]] name = "bon" version = "3.8.2" @@ -2748,6 +2778,16 @@ dependencies = [ "target-lexicon 0.12.16", ] +[[package]] +name = "cfg-expr" +version = "0.20.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78cef5b5a1a6827c7322ae2a636368a573006b27cfa76c7ebd53e834daeaab6a" +dependencies = [ + "smallvec", + "target-lexicon 0.13.3", +] + [[package]] name = "cfg-if" version = "1.0.4" @@ -2973,7 +3013,7 @@ dependencies = [ 
"cloud_llm_client", "collections", "credentials_provider", - "derive_more 0.99.20", + "derive_more", "feature_flags", "fs", "futures 0.3.31", @@ -3411,7 +3451,7 @@ name = "command_palette_hooks" version = "0.1.0" dependencies = [ "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "workspace", ] @@ -3497,6 +3537,16 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "const-oid" version = "0.9.6" @@ -3577,15 +3627,18 @@ dependencies = [ [[package]] name = "convert_case" -version = "0.4.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "convert_case" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" dependencies = [ "unicode-segmentation", ] @@ -4084,13 +4137,13 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "bincode", "cfg-if", "crash-handler", "futures 0.3.31", "log", "mach2 0.5.0", "minidumper", + "parking_lot", "paths", "release_channel", "serde", @@ -4278,7 +4331,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", "typenum", ] @@ -4305,6 +4357,20 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "csv_preview" +version = "0.1.0" 
+dependencies = [ + "anyhow", + "editor", + "feature_flags", + "gpui", + "log", + "text", + "ui", + "workspace", +] + [[package]] name = "ctor" version = "0.4.3" @@ -4643,7 +4709,6 @@ dependencies = [ "sysinfo 0.37.2", "task", "tasks_ui", - "telemetry", "terminal_view", "text", "theme", @@ -4743,34 +4808,23 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.106", -] - -[[package]] -name = "derive_more" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case 0.10.0", "proc-macro2", "quote", + "rustc_version", "syn 2.0.106", "unicode-xid", ] @@ -4966,11 +5020,13 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" [[package]] name = "dispatch2" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" dependencies = [ "bitflags 2.10.0", + "block2", + "libc", "objc2", ] @@ -5367,7 +5423,6 @@ dependencies = [ "semver", "serde_json", "settings", - "supermaven", "telemetry", "text", "theme", @@ -6247,6 +6302,12 @@ 
version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "flate2" version = "1.1.8" @@ -6596,6 +6657,19 @@ dependencies = [ "futures-sink", ] +[[package]] +name = "futures-concurrency" +version = "7.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175cd8cca9e1d45b87f18ffa75088f2099e3c4fe5e2f83e42de112560bea8ea6" +dependencies = [ + "fixedbitset 0.5.7", + "futures-core", + "futures-lite 2.6.1", + "pin-project", + "smallvec", +] + [[package]] name = "futures-core" version = "0.3.31" @@ -7040,13 +7114,26 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + [[package]] name = "gh-workflow" version = "0.8.0" source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" dependencies = [ "async-trait", - "derive_more 2.0.1", + "derive_more", "derive_setters", "gh-workflow-macros", "indexmap", @@ -7094,6 +7181,19 @@ version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" +[[package]] +name = "gio-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0071fe88dba8e40086c8ff9bbb62622999f49628344b1d1bf490a48a29d80f22" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps 7.0.7", + "windows-sys 0.61.2", +] + 
[[package]] name = "git" version = "0.1.0" @@ -7102,7 +7202,7 @@ dependencies = [ "askpass", "async-trait", "collections", - "derive_more 0.99.20", + "derive_more", "futures 0.3.31", "git2", "gpui", @@ -7208,6 +7308,7 @@ dependencies = [ "ctor", "db", "editor", + "feature_flags", "futures 0.3.31", "fuzzy", "git", @@ -7228,6 +7329,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", + "proto", "rand 0.9.2", "remote", "remote_connection", @@ -7267,6 +7369,50 @@ dependencies = [ "xml-rs", ] +[[package]] +name = "glib" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16de123c2e6c90ce3b573b7330de19be649080ec612033d397d72da265f1bd8b" +dependencies = [ + "bitflags 2.10.0", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "smallvec", +] + +[[package]] +name = "glib-macros" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf59b675301228a696fe01c3073974643365080a76cc3ed5bc2cbc466ad87f17" +dependencies = [ + "heck 0.5.0", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "glib-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d95e1a3a19ae464a7286e14af9a90683c64d70c02532d88d87ce95056af3e6c" +dependencies = [ + "libc", + "system-deps 7.0.7", +] + [[package]] name = "glob" version = "0.3.3" @@ -7342,6 +7488,17 @@ dependencies = [ "workspace", ] +[[package]] +name = "gobject-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dca35da0d19a18f4575f3cb99fe1c9e029a2941af5662f326f738a21edaf294" +dependencies = [ + "glib-sys", + "libc", + "system-deps 7.0.7", +] + [[package]] name = "goblin" version = "0.8.2" @@ -7406,6 +7563,7 @@ name = "gpui" version = "0.2.2" dependencies = [ "anyhow", + 
"async-channel 2.5.0", "async-task", "backtrace", "bindgen 0.71.1", @@ -7423,14 +7581,18 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", "embed-resource", "env_logger 0.11.8", "etagere", "foreign-types 0.5.0", "futures 0.3.31", + "futures-concurrency", + "getrandom 0.3.4", "gpui_macros", "gpui_platform", + "gpui_util", + "gpui_web", "http_client", "image", "inventory", @@ -7440,7 +7602,7 @@ dependencies = [ "mach2 0.5.0", "media", "metal", - "naga", + "naga 28.0.0", "num_cpus", "objc", "objc2", @@ -7449,6 +7611,7 @@ dependencies = [ "parking_lot", "pathfinder_geometry", "pin-project", + "pollster 0.4.0", "postage", "pretty_assertions", "profiling", @@ -7464,7 +7627,6 @@ dependencies = [ "serde_json", "slotmap", "smallvec", - "smol", "spin 0.10.0", "stacksafe", "strum 0.27.2", @@ -7472,11 +7634,13 @@ dependencies = [ "taffy", "thiserror 2.0.17", "unicode-segmentation", + "url", "usvg", - "util", "util_macros", "uuid", "waker-fn", + "wasm-bindgen", + "web-time", "windows 0.61.3", "zed-font-kit", "zed-scap", @@ -7494,7 +7658,6 @@ dependencies = [ "calloop", "calloop-wayland-source", "collections", - "cosmic-text", "filedescriptor", "futures 0.3.31", "gpui", @@ -7507,12 +7670,14 @@ dependencies = [ "open", "parking_lot", "pathfinder_geometry", + "pollster 0.4.0", "profiling", "raw-window-handle", "smallvec", "smol", "strum 0.27.2", "swash", + "url", "util", "uuid", "wayland-backend", @@ -7524,7 +7689,6 @@ dependencies = [ "x11-clipboard", "x11rb", "xkbcommon", - "zed-font-kit", "zed-scap", "zed-xim", ] @@ -7535,7 +7699,6 @@ version = "0.1.0" dependencies = [ "anyhow", "async-task", - "bindgen 0.71.1", "block", "cbindgen", "cocoa 0.26.0", @@ -7546,7 +7709,8 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", + "dispatch2", "etagere", "foreign-types 0.5.0", "futures 0.3.31", @@ -7585,9 +7749,11 @@ dependencies = [ name = "gpui_platform" version = "0.1.0" dependencies = [ + 
"console_error_panic_hook", "gpui", "gpui_linux", "gpui_macos", + "gpui_web", "gpui_windows", ] @@ -7601,6 +7767,37 @@ dependencies = [ "util", ] +[[package]] +name = "gpui_util" +version = "0.1.0" +dependencies = [ + "anyhow", + "log", +] + +[[package]] +name = "gpui_web" +version = "0.1.0" +dependencies = [ + "anyhow", + "console_error_panic_hook", + "futures 0.3.31", + "gpui", + "gpui_wgpu", + "http_client", + "js-sys", + "log", + "parking_lot", + "raw-window-handle", + "smallvec", + "uuid", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm_thread", + "web-sys", + "web-time", +] + [[package]] name = "gpui_wgpu" version = "0.1.0" @@ -7608,15 +7805,24 @@ dependencies = [ "anyhow", "bytemuck", "collections", + "cosmic-text", "etagere", "gpui", + "gpui_util", + "itertools 0.14.0", + "js-sys", "log", "parking_lot", + "pollster 0.4.0", "profiling", "raw-window-handle", - "smol", - "util", + "smallvec", + "swash", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", "wgpu", + "zed-font-kit", ] [[package]] @@ -8061,7 +8267,7 @@ dependencies = [ "async-fs", "async-tar", "bytes 1.11.1", - "derive_more 0.99.20", + "derive_more", "futures 0.3.31", "http 1.3.1", "http-body 1.0.1", @@ -8837,9 +9043,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" dependencies = [ "once_cell", "wasm-bindgen", @@ -8938,9 +9144,9 @@ dependencies = [ [[package]] name = "jupyter-protocol" -version = "1.2.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c75a69caf8b8e781224badfb76c4a8da4d49856de36ce72ae3cf5d4a1c94e42" +checksum = "4649647741f9794a7a02e3be976f1b248ba28a37dbfc626d5089316fd4fbf4c8" dependencies = [ "async-trait", "bytes 1.11.1", @@ -9271,6 +9477,7 @@ dependencies = [ 
"open_path_prompt", "picker", "project", + "serde_json", "settings", "ui", "util", @@ -9511,10 +9718,11 @@ dependencies = [ [[package]] name = "libwebrtc" -version = "0.3.10" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.26" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "cxx", + "glib", "jni", "js-sys", "lazy_static", @@ -9608,9 +9816,12 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" -version = "0.7.8" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.32" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ + "base64 0.22.1", + "bmrng", + "bytes 1.11.1", "chrono", "futures-util", "lazy_static", @@ -9631,11 +9842,12 @@ dependencies = [ [[package]] name = "livekit-api" -version = "0.4.2" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.4.14" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ + "base64 0.21.7", "futures-util", - "http 0.2.12", + "http 1.3.1", "livekit-protocol", "livekit-runtime", "log", @@ -9643,20 +9855,22 @@ dependencies = [ "pbjson-types", "prost 0.12.6", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest 0.12.24", + "rustls-native-certs 0.6.3", "scopeguard", "serde", "sha2", "thiserror 1.0.69", "tokio", - "tokio-tungstenite 0.26.2", + "tokio-rustls 0.26.2", + "tokio-tungstenite 
0.28.0", "url", ] [[package]] name = "livekit-protocol" -version = "0.3.9" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.1" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "futures-util", "livekit-runtime", @@ -9664,7 +9878,6 @@ dependencies = [ "pbjson", "pbjson-types", "prost 0.12.6", - "prost-types 0.12.6", "serde", "thiserror 1.0.69", "tokio", @@ -9673,7 +9886,7 @@ dependencies = [ [[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "tokio", "tokio-stream", @@ -9729,7 +9942,6 @@ dependencies = [ "sha2", "simplelog", "smallvec", - "tokio-tungstenite 0.26.2", "ui", "util", "zed-scap", @@ -9824,6 +10036,7 @@ dependencies = [ "ctor", "futures 0.3.31", "gpui", + "gpui_util", "log", "lsp-types", "parking_lot", @@ -9990,6 +10203,7 @@ dependencies = [ "language", "linkify", "log", + "markdown", "markup5ever_rcdom", "mermaid-rs-renderer", "pretty_assertions", @@ -10208,7 +10422,7 @@ dependencies = [ [[package]] name = "mermaid-rs-renderer" version = "0.2.0" -source = "git+https://github.com/zed-industries/mermaid-rs-renderer?branch=fix-font-family-xml-escaping#d91961aa90bc7b0c09c87a13c91d48e2f05c468d" +source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=374db9ead5426697c6c2111151d9f246899bc638#374db9ead5426697c6c2111151d9f246899bc638" dependencies = [ "anyhow", "fontdb 0.16.2", @@ -10489,17 +10703,35 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "multimap" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" - [[package]] name = "naga" version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135" +dependencies = [ + "arrayvec", + "bit-set", + "bitflags 2.10.0", + "cfg-if", + "cfg_aliases 0.2.1", + "codespan-reporting 0.12.0", + "half", + "hashbrown 0.16.1", + "hexf-parse", + "indexmap", + "libm", + "log", + "num-traits", + "once_cell", + "rustc-hash 1.1.0", + "thiserror 2.0.17", + "unicode-ident", +] + +[[package]] +name = "naga" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "arrayvec", "bit-set", @@ -10558,9 +10790,9 @@ dependencies = [ [[package]] name = "nbformat" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10a89a2d910233ec3fca4de359b16ebe95e833c8b2162643ef98c6053a0549d" +checksum = "d4983a40792c45e8639f77ef8e4461c55679cbc618f4b9e83830e8c7e79c8383" dependencies = [ "anyhow", "chrono", @@ -10661,7 +10893,6 @@ dependencies = [ "cfg-if", "cfg_aliases 0.2.1", "libc", - "memoffset", ] [[package]] @@ -10847,6 +11078,22 @@ dependencies = [ "num-iter", "num-traits", "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-bigint-dig" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7f9a86e097b0d187ad0e65667c2f58b9254671e86e7dbb78036b16692eae099" +dependencies = [ + "libm", + "num-integer", + "num-iter", + "num-traits", + "once_cell", + "rand 0.9.2", "serde", "smallvec", "zeroize", @@ -11220,15 +11467,15 @@ checksum = 
"a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "oo7" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3299dd401feaf1d45afd8fd1c0586f10fcfb22f244bb9afa942cec73503b89d" +checksum = "78f2bfed90f1618b4b48dcad9307f25e14ae894e2949642c87c351601d62cebd" dependencies = [ "aes", "ashpd", "async-fs", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "cbc", "cipher", @@ -11236,15 +11483,15 @@ dependencies = [ "endi", "futures-lite 2.6.1", "futures-util", - "getrandom 0.3.4", + "getrandom 0.4.1", "hkdf", "hmac", "md-5", "num", - "num-bigint-dig", + "num-bigint-dig 0.9.1", "pbkdf2 0.12.2", - "rand 0.9.2", "serde", + "serde_bytes", "sha2", "subtle", "zbus", @@ -12224,7 +12471,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ - "fixedbitset", + "fixedbitset 0.4.2", "indexmap", ] @@ -12336,14 +12583,12 @@ name = "picker" version = "0.1.0" dependencies = [ "anyhow", - "ctor", "editor", - "env_logger 0.11.8", "gpui", "menu", "schemars", "serde", - "serde_json", + "settings", "theme", "ui", "ui_input", @@ -12449,6 +12694,7 @@ version = "0.1.0" dependencies = [ "feature_flags", "gpui", + "project", "settings", "smallvec", "theme", @@ -12545,6 +12791,12 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" +[[package]] +name = "pollster" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" + [[package]] name = "pori" version = "0.0.0" @@ -12602,7 +12854,7 @@ dependencies = [ "log", "parking_lot", "pin-project", - "pollster", + "pollster 0.2.5", "static_assertions", "thiserror 1.0.69", ] @@ -13046,7 +13298,7 @@ 
dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap 0.8.3", + "multimap", "petgraph", "prost 0.9.0", "prost-types 0.9.0", @@ -13065,7 +13317,7 @@ dependencies = [ "heck 0.5.0", "itertools 0.12.1", "log", - "multimap 0.10.1", + "multimap", "once_cell", "petgraph", "prettyplease", @@ -13155,10 +13407,11 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ + "ar_archive_writer", "cc", ] @@ -13550,7 +13803,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "simd_helpers", - "system-deps", + "system-deps 6.2.2", "thiserror 1.0.69", "v_frame", "wasm-bindgen", @@ -14031,6 +14284,7 @@ dependencies = [ "serde", "serde_json", "settings", + "shlex", "smol", "telemetry", "terminal", @@ -14061,7 +14315,6 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", - "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -14071,8 +14324,6 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -14081,7 +14332,6 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", @@ -14105,16 +14355,22 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", + "tokio-rustls 0.26.2", "tower 0.5.2", "tower-http 0.6.6", "tower-service", @@ -14132,13 +14388,13 @@ dependencies = [ "bytes 1.11.1", "futures 0.3.31", "gpui", + 
"gpui_util", "http_client", "http_client_tls", "log", "regex", "serde", "tokio", - "util", "zed-reqwest", ] @@ -14338,7 +14594,7 @@ checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" dependencies = [ "const-oid", "digest", - "num-bigint-dig", + "num-bigint-dig 0.8.6", "num-integer", "num-traits", "pkcs1", @@ -14395,9 +14651,9 @@ dependencies = [ [[package]] name = "runtimelib" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d80685459e1e5fa5603182058351ae91c98ca458dfef4e85f0a37be4f7cf1e6c" +checksum = "fa84884e45ed4a1e663120cef3fc11f14d1a2a1933776e1c31599f7bd2dd0c9e" dependencies = [ "async-dispatcher", "async-std", @@ -14772,6 +15028,7 @@ dependencies = [ "futures 0.3.31", "parking_lot", "rand 0.9.2", + "web-time", ] [[package]] @@ -15101,6 +15358,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -15292,7 +15559,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "log", "schemars", @@ -15511,22 +15778,26 @@ name = "sidebar" version = "0.1.0" dependencies = [ "acp_thread", + "agent", + "agent-client-protocol", "agent_ui", + "assistant_text_thread", "chrono", "editor", "feature_flags", "fs", - "fuzzy", "gpui", - "picker", + "language_model", + "menu", "project", "recent_projects", + "serde_json", "settings", "theme", "ui", - "ui_input", "util", "workspace", + "zed_actions", ] [[package]] @@ -15711,7 +15982,7 @@ dependencies = [ "async-executor", "async-fs", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-net", "async-process", "blocking", @@ -16106,9 +16377,9 @@ checksum = 
"6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -16301,49 +16572,6 @@ dependencies = [ "ztracing", ] -[[package]] -name = "supermaven" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "collections", - "edit_prediction_types", - "editor", - "env_logger 0.11.8", - "futures 0.3.31", - "gpui", - "http_client", - "language", - "log", - "postage", - "project", - "serde", - "serde_json", - "settings", - "smol", - "supermaven_api", - "text", - "theme", - "ui", - "unicode-segmentation", - "util", -] - -[[package]] -name = "supermaven_api" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.31", - "http_client", - "paths", - "serde", - "serde_json", - "smol", - "util", -] - [[package]] name = "sval" version = "2.15.0" @@ -16779,13 +17007,26 @@ version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" dependencies = [ - "cfg-expr", + "cfg-expr 0.15.8", "heck 0.5.0", "pkg-config", "toml 0.8.23", "version-compare", ] +[[package]] +name = "system-deps" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c8f33736f986f16d69b6cb8b03f55ddcad5c41acc4ccc39dd88e84aa805e7f" +dependencies = [ + "cfg-expr 0.20.6", + "heck 0.5.0", + "pkg-config", + "toml 0.9.8", + "version-compare", +] + [[package]] name = "system-interface" version = "0.27.3" @@ -17105,7 +17346,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", + "derive_more", "fs", "futures 0.3.31", "gpui", @@ -17292,6 +17533,7 @@ dependencies = [ "core-foundation-sys", "sys-locale", 
"time", + "windows 0.61.3", ] [[package]] @@ -17537,17 +17779,18 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.26.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", - "tungstenite 0.26.2", + "tungstenite 0.28.0", ] [[package]] @@ -18203,9 +18446,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.26.2" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -18222,9 +18465,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -18564,6 +18807,7 @@ dependencies = [ "futures-lite 1.13.0", "git2", "globset", + "gpui_util", "indoc", "itertools 0.14.0", "libc", @@ -18883,6 +19127,15 @@ dependencies = [ "wit-bindgen 0.46.0", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen 0.51.0", +] + [[package]] name = "wasite" version = "0.1.0" @@ -18891,9 +19144,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = 
"wasm-bindgen" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" dependencies = [ "cfg-if", "once_cell", @@ -18902,27 +19155,14 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.106", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.54" +version = "0.4.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" +checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" dependencies = [ "cfg-if", + "futures-util", "js-sys", "once_cell", "wasm-bindgen", @@ -18931,9 +19171,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -18941,22 +19181,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn 2.0.106", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = 
"wasm-bindgen-shared" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" dependencies = [ "unicode-ident", ] @@ -19000,6 +19240,16 @@ dependencies = [ "wasmparser 0.229.0", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser 0.244.0", +] + [[package]] name = "wasm-metadata" version = "0.201.0" @@ -19035,6 +19285,18 @@ dependencies = [ "wasmparser 0.227.1", ] +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", +] + [[package]] name = "wasm-streams" version = "0.4.2" @@ -19048,6 +19310,18 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm_thread" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e" +dependencies = [ + "futures 0.3.31", + "js-sys", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasmparser" version = "0.201.0" @@ -19097,6 +19371,18 @@ dependencies = [ "serde", ] +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.10.0", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + [[package]] name = "wasmprinter" version = "0.229.0" @@ -19525,9 +19811,9 @@ dependencies = [ [[package]] name = "web-sys" -version = 
"0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" +checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" dependencies = [ "js-sys", "wasm-bindgen", @@ -19572,6 +19858,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "cloud_api_types", "cloud_llm_client", "futures 0.3.31", "gpui", @@ -19602,25 +19889,27 @@ dependencies = [ [[package]] name = "webrtc-sys" -version = "0.3.7" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.23" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "cc", "cxx", "cxx-build", "glob", "log", + "pkg-config", "webrtc-sys-build", ] [[package]] name = "webrtc-sys-build" -version = "0.3.6" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.13" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ + "anyhow", "fs2", "regex", - "reqwest 0.11.27", + "reqwest 0.12.24", "scratch", "semver", "zip 0.6.6", @@ -19634,9 +19923,8 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cb534d5ffd109c7d1135f34cdae29e60eab94855a625dcfe1705f8bc7ad79f" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19647,7 +19935,7 @@ 
dependencies = [ "hashbrown 0.16.1", "js-sys", "log", - "naga", + "naga 28.0.1", "parking_lot", "portable-atomic", "profiling", @@ -19664,9 +19952,8 @@ dependencies = [ [[package]] name = "wgpu-core" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb4c8b5db5f00e56f1f08869d870a0dff7c8bc7ebc01091fec140b0cf0211a9" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "arrayvec", "bit-set", @@ -19678,7 +19965,7 @@ dependencies = [ "hashbrown 0.16.1", "indexmap", "log", - "naga", + "naga 28.0.1", "once_cell", "parking_lot", "portable-atomic", @@ -19696,36 +19983,32 @@ dependencies = [ [[package]] name = "wgpu-core-deps-apple" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87b7b696b918f337c486bf93142454080a32a37832ba8a31e4f48221890047da" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-emscripten" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b251c331f84feac147de3c4aa3aa45112622a95dd7ee1b74384fa0458dbd79" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-windows-linux-android" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ca976e72b2c9964eb243e281f6ce7f14a514e409920920dcda12ae40febaae" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "wgpu-hal", ] [[package]] name = 
"wgpu-hal" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293080d77fdd14d6b08a67c5487dfddbf874534bb7921526db56a7b75d7e3bef" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "android_system_properties", "arrayvec", @@ -19748,7 +20031,7 @@ dependencies = [ "libloading", "log", "metal", - "naga", + "naga 28.0.1", "ndk-sys", "objc", "once_cell", @@ -19771,9 +20054,8 @@ dependencies = [ [[package]] name = "wgpu-types" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e18308757e594ed2cd27dddbb16a139c42a683819d32a2e0b1b0167552f5840c" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c" dependencies = [ "bitflags 2.10.0", "bytemuck", @@ -20701,6 +20983,15 @@ version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro 0.51.0", +] + [[package]] name = "wit-bindgen-core" version = "0.22.0" @@ -20722,6 +21013,17 @@ dependencies = [ "wit-parser 0.227.1", ] +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser 0.244.0", +] + [[package]] name = "wit-bindgen-rt" version = "0.22.0" @@ -20769,6 +21071,22 @@ dependencies = [ "wit-component 0.227.1", ] +[[package]] +name = "wit-bindgen-rust" +version = 
"0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck 0.5.0", + "indexmap", + "prettyplease", + "syn 2.0.106", + "wasm-metadata 0.244.0", + "wit-bindgen-core 0.51.0", + "wit-component 0.244.0", +] + [[package]] name = "wit-bindgen-rust-macro" version = "0.22.0" @@ -20798,6 +21116,21 @@ dependencies = [ "wit-bindgen-rust 0.41.0", ] +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.106", + "wit-bindgen-core 0.51.0", + "wit-bindgen-rust 0.51.0", +] + [[package]] name = "wit-component" version = "0.201.0" @@ -20836,6 +21169,25 @@ dependencies = [ "wit-parser 0.227.1", ] +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.10.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.244.0", + "wasm-metadata 0.244.0", + "wasmparser 0.244.0", + "wit-parser 0.244.0", +] + [[package]] name = "wit-parser" version = "0.201.0" @@ -20890,6 +21242,24 @@ dependencies = [ "wasmparser 0.229.0", ] +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser 0.244.0", +] + [[package]] name = "witx" version = "0.9.1" @@ -20909,7 +21279,6 @@ dependencies = [ "any_vec", "anyhow", "async-recursion", 
- "call", "chrono", "client", "clock", @@ -21174,6 +21543,7 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9" name = "xtask" version = "0.1.0" dependencies = [ + "annotate-snippets", "anyhow", "backtrace", "cargo_metadata", @@ -21182,8 +21552,12 @@ dependencies = [ "gh-workflow", "indexmap", "indoc", + "itertools 0.14.0", + "regex", "serde", "serde_json", + "serde_yaml", + "strum 0.27.2", "toml 0.8.23", "toml_edit 0.22.27", ] @@ -21301,14 +21675,14 @@ dependencies = [ [[package]] name = "zbus" -version = "5.12.0" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b622b18155f7a93d1cd2dc8c01d2d6a44e08fb9ebb7b3f9e6ed101488bad6c91" +checksum = "1bfeff997a0aaa3eb20c4652baf788d2dfa6d2839a0ead0b3ff69ce2f9c4bdd1" dependencies = [ "async-broadcast", "async-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-process", "async-recursion", "async-task", @@ -21319,8 +21693,9 @@ dependencies = [ "futures-core", "futures-lite 2.6.1", "hex", - "nix 0.30.1", + "libc", "ordered-stream", + "rustix 1.1.2", "serde", "serde_repr", "tracing", @@ -21335,9 +21710,9 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.12.0" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdb94821ca8a87ca9c298b5d1cbd80e2a8b67115d99f6e4551ac49e42b6a314" +checksum = "0bbd5a90dbe8feee5b13def448427ae314ccd26a49cac47905cafefb9ff846f1" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -21350,19 +21725,18 @@ dependencies = [ [[package]] name = "zbus_names" -version = "4.2.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" +checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f" dependencies = [ "serde", - "static_assertions", "winnow", "zvariant", ] [[package]] name = "zed" -version = "0.226.0" +version = "0.228.0" 
dependencies = [ "acp_thread", "acp_tools", @@ -21380,7 +21754,6 @@ dependencies = [ "audio", "auto_update", "auto_update_ui", - "bincode", "breadcrumbs", "call", "channel", @@ -21399,6 +21772,7 @@ dependencies = [ "copilot_chat", "copilot_ui", "crashes", + "csv_preview", "dap", "dap_adapters", "db", @@ -21463,6 +21837,7 @@ dependencies = [ "parking_lot", "paths", "picker", + "pkg-config", "pretty_assertions", "profiling", "project", @@ -21490,7 +21865,6 @@ dependencies = [ "smol", "snippet_provider", "snippets_ui", - "supermaven", "svg_preview", "sysinfo 0.37.2", "system_specs", @@ -21979,14 +22353,14 @@ dependencies = [ [[package]] name = "zvariant" -version = "5.8.0" +version = "5.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2be61892e4f2b1772727be11630a62664a1826b62efa43a6fe7449521cb8744c" +checksum = "68b64ef4f40c7951337ddc7023dd03528a57a3ce3408ee9da5e948bd29b232c4" dependencies = [ "endi", "enumflags2", "serde", - "url", + "serde_bytes", "winnow", "zvariant_derive", "zvariant_utils", @@ -21994,9 +22368,9 @@ dependencies = [ [[package]] name = "zvariant_derive" -version = "5.8.0" +version = "5.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da58575a1b2b20766513b1ec59d8e2e68db2745379f961f86650655e862d2006" +checksum = "484d5d975eb7afb52cc6b929c13d3719a20ad650fea4120e6310de3fc55e415c" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -22007,9 +22381,9 @@ dependencies = [ [[package]] name = "zvariant_utils" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6949d142f89f6916deca2232cf26a8afacf2b9fdc35ce766105e104478be599" +checksum = "f75c23a64ef8f40f13a6989991e643554d9bef1d682a281160cf0c1bc389c5e9" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index 49b765c512accc3a19662da41520061479b8cc44..b8e57bda7e46ea45451fedd6759268235c7d71ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,8 +1,8 @@ 
[workspace] resolver = "2" members = [ - "crates/acp_tools", "crates/acp_thread", + "crates/acp_tools", "crates/action_log", "crates/activity_indicator", "crates/agent", @@ -13,9 +13,9 @@ members = [ "crates/anthropic", "crates/askpass", "crates/assets", - "crates/assistant_text_thread", "crates/assistant_slash_command", "crates/assistant_slash_commands", + "crates/assistant_text_thread", "crates/audio", "crates/auto_update", "crates/auto_update_helper", @@ -32,6 +32,7 @@ members = [ "crates/cloud_api_client", "crates/cloud_api_types", "crates/cloud_llm_client", + "crates/codestral", "crates/collab", "crates/collab_ui", "crates/collections", @@ -44,6 +45,7 @@ members = [ "crates/copilot_chat", "crates/crashes", "crates/credentials_provider", + "crates/csv_preview", "crates/dap", "crates/dap_adapters", "crates/db", @@ -56,9 +58,10 @@ members = [ "crates/diagnostics", "crates/docs_preprocessor", "crates/edit_prediction", + "crates/edit_prediction_cli", + "crates/edit_prediction_context", "crates/edit_prediction_types", "crates/edit_prediction_ui", - "crates/edit_prediction_context", "crates/editor", "crates/encoding_selector", "crates/etw_tracing", @@ -88,9 +91,11 @@ members = [ "crates/gpui_macos", "crates/gpui_macros", "crates/gpui_platform", + "crates/gpui_tokio", + "crates/gpui_util", + "crates/gpui_web", "crates/gpui_wgpu", "crates/gpui_windows", - "crates/gpui_tokio", "crates/html_to_markdown", "crates/http_client", "crates/http_client_tls", @@ -119,8 +124,8 @@ members = [ "crates/media", "crates/menu", "crates/migrator", - "crates/mistral", "crates/miniprofiler_ui", + "crates/mistral", "crates/multi_buffer", "crates/nc", "crates/net", @@ -136,6 +141,7 @@ members = [ "crates/panel", "crates/paths", "crates/picker", + "crates/platform_title_bar", "crates/prettier", "crates/project", "crates/project_benchmarks", @@ -147,7 +153,6 @@ members = [ "crates/refineable", "crates/refineable/derive_refineable", "crates/release_channel", - "crates/scheduler", 
"crates/remote", "crates/remote_connection", "crates/remote_server", @@ -157,10 +162,10 @@ members = [ "crates/rope", "crates/rpc", "crates/rules_library", + "crates/scheduler", "crates/schema_generator", "crates/search", "crates/session", - "crates/sidebar", "crates/settings", "crates/settings_content", "crates/settings_json", @@ -168,6 +173,7 @@ members = [ "crates/settings_profile_selector", "crates/settings_ui", "crates/shell_command_parser", + "crates/sidebar", "crates/snippet", "crates/snippet_provider", "crates/snippets_ui", @@ -177,9 +183,6 @@ members = [ "crates/storybook", "crates/streaming_diff", "crates/sum_tree", - "crates/supermaven", - "crates/supermaven_api", - "crates/codestral", "crates/svg_preview", "crates/system_specs", "crates/tab_switcher", @@ -195,7 +198,6 @@ members = [ "crates/theme_importer", "crates/theme_selector", "crates/time_format", - "crates/platform_title_bar", "crates/title_bar", "crates/toolchain_selector", "crates/ui", @@ -207,10 +209,10 @@ members = [ "crates/vercel", "crates/vim", "crates/vim_mode_setting", - "crates/which_key", "crates/watch", "crates/web_search", "crates/web_search_providers", + "crates/which_key", "crates/workspace", "crates/worktree", "crates/worktree_benchmarks", @@ -218,7 +220,6 @@ members = [ "crates/zed", "crates/zed_actions", "crates/zed_env_vars", - "crates/edit_prediction_cli", "crates/zeta_prompt", "crates/zlog", "crates/zlog_settings", @@ -298,6 +299,7 @@ copilot_ui = { path = "crates/copilot_ui" } crashes = { path = "crates/crashes" } credentials_provider = { path = "crates/credentials_provider" } crossbeam = "0.8.4" +csv_preview = { path = "crates/csv_preview"} dap = { path = "crates/dap" } dap_adapters = { path = "crates/dap_adapters" } db = { path = "crates/db" } @@ -332,9 +334,11 @@ gpui_linux = { path = "crates/gpui_linux", default-features = false } gpui_macos = { path = "crates/gpui_macos", default-features = false } gpui_macros = { path = "crates/gpui_macros" } gpui_platform = { path = 
"crates/gpui_platform", default-features = false } +gpui_web = { path = "crates/gpui_web" } gpui_wgpu = { path = "crates/gpui_wgpu" } gpui_windows = { path = "crates/gpui_windows", default-features = false } gpui_tokio = { path = "crates/gpui_tokio" } +gpui_util = { path = "crates/gpui_util" } html_to_markdown = { path = "crates/html_to_markdown" } http_client = { path = "crates/http_client" } http_client_tls = { path = "crates/http_client_tls" } @@ -366,7 +370,7 @@ markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } -mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", branch = "fix-font-family-xml-escaping", default-features = false } +mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "374db9ead5426697c6c2111151d9f246899bc638", default-features = false } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } @@ -423,8 +427,6 @@ sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } -supermaven = { path = "crates/supermaven" } -supermaven_api = { path = "crates/supermaven_api" } codestral = { path = "crates/codestral" } system_specs = { path = "crates/system_specs" } tab_switcher = { path = "crates/tab_switcher" } @@ -479,9 +481,15 @@ alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } -ashpd = { version = "0.12.1", default-features = false, features = [ - "async-std", +ashpd = { version = "0.13", default-features = false, features = [ + "async-io", + "notification", + "open_uri", + "file_chooser", + "settings", + "trash" ] } +async-channel = "2.5.0" async-compat = "0.2.1" 
async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" @@ -530,7 +538,16 @@ criterion = { version = "0.5", features = ["html_reports"] } ctor = "0.4.0" dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "1b461b310481d01e02b2603c16d7144b926339f8" } dashmap = "6.0" -derive_more = "0.99.17" +derive_more = { version = "2.1.1", features = [ + "add", + "add_assign", + "deref", + "deref_mut", + "from_str", + "mul", + "mul_assign", + "not", +] } dirs = "4.0" documented = "0.9.1" dotenvy = "0.15.0" @@ -542,6 +559,7 @@ exec = "0.3.1" fancy-regex = "0.16.0" fork = "0.4.0" futures = "0.3" +futures-concurrency = "7.7.1" futures-lite = "1.13" gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "c9eac0ed361583e1072860d96776fa52775b82ac" } git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] } @@ -565,11 +583,13 @@ itertools = "0.14.0" json_dotpath = "1.1" jsonschema = "0.37.0" jsonwebtoken = "10.0" -jupyter-protocol = "1.2.0" +jupyter-protocol = "1.4.0" jupyter-websocket-client = "1.0.0" libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" +libwebrtc = "0.3.26" +livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" } mach2 = "0.5" @@ -579,7 +599,7 @@ minidumper = "0.8" moka = { version = "0.12.10", features = ["sync"] } naga = { version = "28.0", features = ["wgsl-in"] } nanoid = "0.4" -nbformat = "1.1.0" +nbformat = "1.2.0" nix = "0.29" num-format = "0.4.4" objc = "0.2" @@ -632,6 +652,7 @@ profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" +pollster = "0.4.0" pulldown-cmark = { version = "0.13.0", default-features = false } quote = "1.0.9" rand = "0.9" @@ -648,7 +669,7 @@ reqwest = 
{ git = "https://github.com/zed-industries/reqwest.git", rev = "c15662 "stream", ], package = "zed-reqwest", version = "0.12.15-zed" } rsa = "0.9.6" -runtimelib = { version = "1.2.0", default-features = false, features = [ +runtimelib = { version = "1.4.0", default-features = false, features = [ "async-dispatcher-runtime", "aws-lc-rs" ] } rust-embed = { version = "8.4", features = ["include-exclude"] } @@ -756,7 +777,9 @@ wasmtime = { version = "33", default-features = false, features = [ wasmtime-wasi = "33" wax = "0.7" which = "6.0.0" -wgpu = "28.0" +wasm-bindgen = "0.2.113" +web-time = "1.1.0" +wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "9459e95113c5bd116b2cc2c87e8424b28059e17c" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" @@ -767,11 +790,13 @@ zstd = "0.11" version = "0.61" features = [ "Foundation_Numerics", + "Globalization_DateTimeFormatting", "Storage_Search", "Storage_Streams", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", + "Win32_Foundation", "Win32_Globalization", "Win32_Graphics_Direct3D", "Win32_Graphics_Direct3D11", @@ -799,6 +824,7 @@ features = [ "Win32_System_Ole", "Win32_System_Performance", "Win32_System_Pipes", + "Win32_System_RestartManager", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", @@ -821,6 +847,8 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = "https://github.com/zed-industries/calloop" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" 
} [profile.dev] split-debuginfo = "unpacked" @@ -880,7 +908,6 @@ sidebar = { codegen-units = 1 } snippet = { codegen-units = 1 } snippets_ui = { codegen-units = 1 } story = { codegen-units = 1 } -supermaven_api = { codegen-units = 1 } telemetry_events = { codegen-units = 1 } theme_selector = { codegen-units = 1 } time_format = { codegen-units = 1 } diff --git a/assets/icons/ai_vercel.svg b/assets/icons/ai_vercel.svg new file mode 100644 index 0000000000000000000000000000000000000000..c6cc5796f724e713437c4866053380cf2e14d511 --- /dev/null +++ b/assets/icons/ai_vercel.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/fast_forward.svg b/assets/icons/fast_forward.svg new file mode 100644 index 0000000000000000000000000000000000000000..240bc65aca3558561bb52f2f8c5e860d38596223 --- /dev/null +++ b/assets/icons/fast_forward.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/fast_forward_off.svg b/assets/icons/fast_forward_off.svg new file mode 100644 index 0000000000000000000000000000000000000000..8ea7c41c6582b031f066f590dd425641945aadc9 --- /dev/null +++ b/assets/icons/fast_forward_off.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_icons/gitlab.svg b/assets/icons/file_icons/gitlab.svg new file mode 100644 index 0000000000000000000000000000000000000000..f0faf570b125c7764e769ae60f7a6ce6f7825ceb --- /dev/null +++ b/assets/icons/file_icons/gitlab.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/helm.svg b/assets/icons/file_icons/helm.svg new file mode 100644 index 0000000000000000000000000000000000000000..03e702f2d5081c4e96ff4db7ba7428817b08748f --- /dev/null +++ b/assets/icons/file_icons/helm.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/yaml.svg b/assets/icons/file_icons/yaml.svg new file mode 100644 index 0000000000000000000000000000000000000000..2c3efd46cd45ff67d6c46d84476d563dd5ac3a73 --- /dev/null +++ b/assets/icons/file_icons/yaml.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/git_commit.svg b/assets/icons/git_commit.svg new file 
mode 100644 index 0000000000000000000000000000000000000000..38b36ec7efb72275e5e6efbbe761deb54050cfe7 --- /dev/null +++ b/assets/icons/git_commit.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/git_graph.svg b/assets/icons/git_graph.svg index 8f372a305d3fddf2901756108c83d09b31fb657e..7ae33e365d40bfccd9c48e4f7e94b10d3687f8dc 100644 --- a/assets/icons/git_graph.svg +++ b/assets/icons/git_graph.svg @@ -1,4 +1,7 @@ - - + + + + + diff --git a/assets/icons/new_thread.svg b/assets/icons/new_thread.svg new file mode 100644 index 0000000000000000000000000000000000000000..19b8fa25ea30ed47a57a5d5f83d62f2b4b56b61e --- /dev/null +++ b/assets/icons/new_thread.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/open_folder.svg b/assets/icons/open_folder.svg new file mode 100644 index 0000000000000000000000000000000000000000..c4aa32b29cc1048fd4ecd8b1b4d32b68ae0a8ad3 --- /dev/null +++ b/assets/icons/open_folder.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/queue_message.svg b/assets/icons/queue_message.svg new file mode 100644 index 0000000000000000000000000000000000000000..1bdf6738bcf3143fc13a820281cf1cab8531bd36 --- /dev/null +++ b/assets/icons/queue_message.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index f3247e936f2b6d2d5ee5275304ea445729046afa..0b354ef1c039c2fe7dde2f20bb30ef71f067e84d 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -204,6 +204,7 @@ { "context": "Editor && editor_agent_diff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,6 +215,7 @@ { "context": "AgentDiff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -333,6 +335,7 @@ "ctrl-alt-k": "agent::ToggleThinkingMode", "ctrl-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", 
+ "ctrl-alt-.": "agent::ToggleFastMode", }, }, { @@ -670,6 +673,9 @@ "use_key_equivalents": true, "bindings": { "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -1309,6 +1315,7 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", "ctrl-space": "git::WorktreeFromDefault", + "ctrl-shift-backspace": "git::DeleteWorktree", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 77e01368462cdfcce24cf1cba39d6a2a11cdcce0..052475ddb981c4db5495914096ffd72dee54d80f 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -242,6 +242,7 @@ "context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -252,6 +253,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -377,6 +379,7 @@ "cmd-alt-k": "agent::ToggleThinkingMode", "cmd-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", + "cmd-alt-.": "agent::ToggleFastMode", }, }, { @@ -447,6 +450,13 @@ "down": "search::NextHistoryQuery", }, }, + { + "context": "BufferSearchBar || ProjectSearchBar", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchBar", "use_key_equivalents": true, @@ -731,6 +741,9 @@ "use_key_equivalents": true, "bindings": { "cmd-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -1407,6 +1420,7 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", 
"ctrl-space": "git::WorktreeFromDefault", + "cmd-shift-backspace": "git::DeleteWorktree", }, }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 51b221c8389d1588d80a8186ddceb68e8cb025c7..ef2b339951382a44433372b34e7e62b082428362 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -203,6 +203,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,6 +215,7 @@ "context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -335,6 +337,7 @@ "ctrl-alt-k": "agent::ToggleThinkingMode", "ctrl-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", + "ctrl-alt-.": "agent::ToggleFastMode", }, }, { @@ -674,6 +677,9 @@ "use_key_equivalents": true, "bindings": { "ctrl-n": "multi_workspace::NewWorkspaceInWindow", + "left": "agents_sidebar::CollapseSelectedEntry", + "right": "agents_sidebar::ExpandSelectedEntry", + "enter": "menu::Confirm", }, }, { @@ -1330,6 +1336,7 @@ "bindings": { "ctrl-shift-space": "git::WorktreeFromDefaultOnWindow", "ctrl-space": "git::WorktreeFromDefault", + "ctrl-shift-backspace": "git::DeleteWorktree", }, }, { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 9832ce8fe08fe23d610a1c2ee1a95ad4c2c2574c..1f2742f982bc2165181a797e577b350f5630def9 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1110,4 +1110,12 @@ "shift-g": "menu::SelectLast", }, }, + { + "context": "NotebookEditor > Editor && VimControl && vim_mode == normal", + + "bindings": { + "j": "notebook::NotebookMoveDown", + "k": "notebook::NotebookMoveUp", + }, + }, ] diff --git a/assets/settings/default.json b/assets/settings/default.json index 
0a57472a5f21657cab89bd3e6f64e259a4a220e6..0a824bbe93a0d68a23d934a63eb1fdab1e2f1b02 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -361,8 +361,11 @@ // bracket, brace, single or double quote characters. // For example, when you select text and type '(', Zed will surround the text with (). "use_auto_surround": true, - // Whether indentation should be adjusted based on the context whilst typing. - "auto_indent": true, + // Controls automatic indentation behavior when typing. + // - "syntax_aware": Adjusts indentation based on syntax context (default) + // - "preserve_indent": Preserves current line's indentation on new lines + // - "none": No automatic indentation + "auto_indent": "syntax_aware", // Whether indentation of pasted content should be adjusted based on the context. "auto_indent_on_paste": true, // Controls how the editor handles the autoclosed characters. @@ -799,6 +802,8 @@ // 3. Show files first, then directories: // "files_first" "sort_mode": "directories_first", + // Whether to show error and warning count badges next to file names in the project panel. + "diagnostic_badges": false, // Whether to enable drag-and-drop operations in the project panel. "drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window; @@ -913,6 +918,10 @@ // Default: inherits editor scrollbar settings // "show": null }, + // Whether to show the addition/deletion change count next to each file in the Git panel. + // + // Default: false + "diff_stats": false, }, "message_editor": { // Whether to automatically replace emoji shortcodes with emoji characters. @@ -1265,8 +1274,6 @@ // // Default: true "skip_focus_for_active_in_search": true, - // Whether to show the git status in the file finder. - "git_status": true, // Whether to use gitignored files when searching. // Only the file Zed had indexed will be used, not necessary all the gitignored files. 
// @@ -1827,8 +1834,8 @@ " (", " # multi-char path: first char (not opening delimiter, space, or box drawing char)", " [^({\\[<\"'`\\ \\u2500-\\u257F]", - " # middle chars: non-space, and colon/paren only if not followed by digit/paren", - " ([^\\ :(]|[:(][^0-9()])*", + " # middle chars: non-space, and colon/paren only if not followed by digit/paren/space", + " ([^\\ :(]|[:(][^0-9()\\ ])*", " # last char: not closing delimiter or colon", " [^()}\\]>\"'`.,;:\\ ]", " |", @@ -2222,6 +2229,9 @@ "vercel": { "api_url": "https://api.v0.dev/v1", }, + "vercel_ai_gateway": { + "api_url": "https://ai-gateway.vercel.sh/v1", + }, "x_ai": { "api_url": "https://api.x.ai/v1", }, diff --git a/assets/settings/default_semantic_token_rules.json b/assets/settings/default_semantic_token_rules.json index c5e9d1438cad583e78bc3e109b4bc79c62aa7ac5..65b20a7423aef3c3221f9f80e345fd503627d98d 100644 --- a/assets/settings/default_semantic_token_rules.json +++ b/assets/settings/default_semantic_token_rules.json @@ -2,7 +2,9 @@ // // These rules map LSP semantic token types to syntax theme styles. // To customize, add rules to "semantic_token_rules" in your settings.json. -// User-defined rules are prepended to these defaults and take precedence. +// User-defined rules are prepended and take highest precedence. +// Extension language rules are applied next. +// These built-in defaults are applied last. // // Each rule has the following properties: // - `token_type`: The LSP semantic token type to match. If omitted, matches all types. diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index fea3236e1697e3af189da2e6a0f14d70a6f1c6f6..1b9271918884dc020986577926d9578e3a6f049c 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2,55 +2,23 @@ mod connection; mod diff; mod mention; mod terminal; - -/// Key used in ACP ToolCall meta to store the tool's programmatic name. 
-/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. -pub const TOOL_NAME_META_KEY: &str = "tool_name"; - -/// Key used in ACP ToolCall meta to store the session id when a subagent is spawned. -pub const SUBAGENT_SESSION_ID_META_KEY: &str = "subagent_session_id"; - -/// Helper to extract tool name from ACP meta -pub fn tool_name_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(TOOL_NAME_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| SharedString::from(s.to_owned())) -} - -/// Helper to extract subagent session id from ACP meta -pub fn subagent_session_id_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(SUBAGENT_SESSION_ID_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| acp::SessionId::from(s.to_string())) -} - -/// Helper to create meta with tool name -pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { - acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) -} -use collections::HashSet; -pub use connection::*; -pub use diff::*; -use language::language_settings::FormatOnSave; -pub use mention::*; -use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use serde::{Deserialize, Serialize}; -use serde_json::to_string_pretty; - -use task::{Shell, ShellBuilder}; -pub use terminal::*; - use action_log::{ActionLog, ActionLogTelemetry}; use agent_client_protocol::{self as acp}; use anyhow::{Context as _, Result, anyhow}; +use collections::HashSet; +pub use connection::*; +pub use diff::*; use futures::{FutureExt, channel::oneshot, future::BoxFuture}; use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity}; use itertools::Itertools; +use language::language_settings::FormatOnSave; use language::{Anchor, Buffer, BufferSnapshot, LanguageRegistry, Point, ToPoint, text_diff}; use markdown::Markdown; +pub use mention::*; +use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{AgentLocation, Project, 
git_store::GitStoreCheckpoint}; +use serde::{Deserialize, Serialize}; +use serde_json::to_string_pretty; use std::collections::HashMap; use std::error::Error; use std::fmt::{Formatter, Write}; @@ -59,11 +27,51 @@ use std::process::ExitStatus; use std::rc::Rc; use std::time::{Duration, Instant}; use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; +use task::{Shell, ShellBuilder}; +pub use terminal::*; use text::Bias; use ui::App; use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; +/// Key used in ACP ToolCall meta to store the tool's programmatic name. +/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. +pub const TOOL_NAME_META_KEY: &str = "tool_name"; + +/// Helper to extract tool name from ACP meta +pub fn tool_name_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(TOOL_NAME_META_KEY)) + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) +} + +/// Helper to create meta with tool name +pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { + acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) +} + +/// Key used in ACP ToolCall meta to store the session id and message indexes +pub const SUBAGENT_SESSION_INFO_META_KEY: &str = "subagent_session_info"; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SubagentSessionInfo { + /// The session id of the subagent session that was spawned + pub session_id: acp::SessionId, + /// The index of the message of the start of the "turn" run by this tool call + pub message_start_index: usize, + /// The index of the output of the message that the subagent has returned + #[serde(skip_serializing_if = "Option::is_none")] + pub message_end_index: Option, +} + +/// Helper to extract subagent session id from ACP meta +pub fn subagent_session_info_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(SUBAGENT_SESSION_INFO_META_KEY)) + .and_then(|v| 
serde_json::from_value(v.clone()).ok()) +} + #[derive(Debug)] pub struct UserMessage { pub id: Option, @@ -102,6 +110,7 @@ impl UserMessage { pub struct AssistantMessage { pub chunks: Vec, pub indented: bool, + pub is_subagent_output: bool, } impl AssistantMessage { @@ -222,7 +231,7 @@ pub struct ToolCall { pub raw_input_markdown: Option>, pub raw_output: Option, pub tool_name: Option, - pub subagent_session_id: Option, + pub subagent_session_info: Option, } impl ToolCall { @@ -261,7 +270,7 @@ impl ToolCall { let tool_name = tool_name_from_meta(&tool_call.meta); - let subagent_session = subagent_session_id_from_meta(&tool_call.meta); + let subagent_session_info = subagent_session_info_from_meta(&tool_call.meta); let result = Self { id: tool_call.tool_call_id, @@ -276,7 +285,7 @@ impl ToolCall { raw_input_markdown, raw_output: tool_call.raw_output, tool_name, - subagent_session_id: subagent_session, + subagent_session_info, }; Ok(result) } @@ -309,8 +318,8 @@ impl ToolCall { self.status = status.into(); } - if let Some(subagent_session_id) = subagent_session_id_from_meta(&meta) { - self.subagent_session_id = Some(subagent_session_id); + if let Some(subagent_session_info) = subagent_session_info_from_meta(&meta) { + self.subagent_session_info = Some(subagent_session_info); } if let Some(title) = title { @@ -401,7 +410,7 @@ impl ToolCall { pub fn is_subagent(&self) -> bool { self.tool_name.as_ref().is_some_and(|s| s == "spawn_agent") - || self.subagent_session_id.is_some() + || self.subagent_session_info.is_some() } pub fn to_markdown(&self, cx: &App) -> String { @@ -961,6 +970,10 @@ pub struct AcpThread { pending_terminal_output: HashMap>>, pending_terminal_exit: HashMap, had_error: bool, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. + draft_prompt: Option>, + /// The initial scroll position for the thread view, set during session registration. 
+ ui_scroll_position: Option, } impl From<&AcpThread> for ActionLogTelemetry { @@ -983,7 +996,7 @@ pub enum AcpThreadEvent { ToolAuthorizationReceived(acp::ToolCallId), Retry(RetryStatus), SubagentSpawned(acp::SessionId), - Stopped, + Stopped(acp::StopReason), Error, LoadError(LoadError), PromptCapabilitiesUpdated, @@ -1198,6 +1211,8 @@ impl AcpThread { pending_terminal_output: HashMap::default(), pending_terminal_exit: HashMap::default(), had_error: false, + draft_prompt: None, + ui_scroll_position: None, } } @@ -1209,6 +1224,22 @@ impl AcpThread { self.prompt_capabilities.clone() } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn connection(&self) -> &Rc { &self.connection } @@ -1425,6 +1456,7 @@ impl AcpThread { && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks, indented: existing_indented, + is_subagent_output: _, }) = last_entry && *existing_indented == indented { @@ -1456,6 +1488,7 @@ impl AcpThread { AgentThreadEntry::AssistantMessage(AssistantMessage { chunks: vec![chunk], indented, + is_subagent_output: false, }), cx, ); @@ -1525,7 +1558,7 @@ impl AcpThread { raw_input_markdown: None, raw_output: None, tool_name: None, - subagent_session_id: None, + subagent_session_info: None, }; self.push_entry(AgentThreadEntry::ToolCall(failed_tool_call), cx); return Ok(()); @@ -1589,6 +1622,7 @@ impl AcpThread { let agent_telemetry_id = self.connection().telemetry_id(); let session = self.session_id(); + let parent_session_id = self.parent_session_id(); if let ToolCallStatus::Completed | ToolCallStatus::Failed = status { let status = if matches!(status, ToolCallStatus::Completed) { "completed" @@ -1599,6 +1633,7 @@ impl 
AcpThread { "Agent Tool Call Completed", agent_telemetry_id, session, + parent_session_id, status ); } @@ -1687,10 +1722,14 @@ impl AcpThread { pub fn tool_call_for_subagent(&self, session_id: &acp::SessionId) -> Option<&ToolCall> { self.entries.iter().find_map(|entry| match entry { - AgentThreadEntry::ToolCall(tool_call) - if tool_call.subagent_session_id.as_ref() == Some(session_id) => - { - Some(tool_call) + AgentThreadEntry::ToolCall(tool_call) => { + if let Some(subagent_session_info) = &tool_call.subagent_session_info + && &subagent_session_info.session_id == session_id + { + Some(tool_call) + } else { + None + } } _ => None, }) @@ -1698,6 +1737,7 @@ impl AcpThread { pub fn resolve_locations(&mut self, id: acp::ToolCallId, cx: &mut Context) { let project = self.project.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); let Some((_, tool_call)) = self.tool_call_mut(&id) else { return; }; @@ -1733,7 +1773,7 @@ impl AcpThread { } else { false }; - if !should_ignore { + if !should_ignore && should_update_agent_location { project.set_agent_location(Some(location.into()), cx); } }); @@ -1964,8 +2004,10 @@ impl AcpThread { .await?; this.update(cx, |this, cx| { - this.project - .update(cx, |project, cx| project.set_agent_location(None, cx)); + if this.parent_session_id.is_none() { + this.project + .update(cx, |project, cx| project.set_agent_location(None, cx)); + } let Ok(response) = response else { // tx dropped, just return return Ok(None); @@ -2033,7 +2075,7 @@ impl AcpThread { } } - cx.emit(AcpThreadEvent::Stopped); + cx.emit(AcpThreadEvent::Stopped(r.stop_reason)); Ok(Some(r)) } Err(e) => { @@ -2237,6 +2279,7 @@ impl AcpThread { let limit = limit.unwrap_or(u32::MAX); let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2287,15 +2330,17 @@ 
impl AcpThread { let start = snapshot.anchor_before(start_position); let end = snapshot.anchor_before(Point::new(line.saturating_add(limit), 0)); - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: start, - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: start, + }), + cx, + ); + }); + } Ok(snapshot.text_for_range(start..end).collect::()) }) @@ -2309,6 +2354,7 @@ impl AcpThread { ) -> Task> { let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2336,18 +2382,20 @@ impl AcpThread { }) .await; - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: edits - .last() - .map(|(range, _)| range.end) - .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: edits + .last() + .map(|(range, _)| range.end) + .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), + }), + cx, + ); + }); + } let format_on_save = cx.update(|cx| { action_log.update(cx, |action_log, cx| { @@ -2549,6 +2597,16 @@ impl AcpThread { self.terminals.insert(terminal_id.clone(), entity.clone()); entity } + + pub fn mark_as_subagent_output(&mut self, cx: &mut Context) { + for entry in self.entries.iter_mut().rev() { + if let AgentThreadEntry::AssistantMessage(assistant_message) = entry { + assistant_message.is_subagent_output = true; + cx.notify(); + return; + } + } + } } fn markdown_for_raw_output( diff --git 
a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 0becded53762be7c96789b0d31191fd9cbc02bfe..773508f1c898c39d713d5779c82384caf8f190ec 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -496,6 +496,7 @@ mod test_support { //! - `create_test_png_base64` for generating test images use std::sync::Arc; + use std::sync::atomic::{AtomicUsize, Ordering}; use action_log::ActionLog; use collections::HashMap; @@ -621,7 +622,9 @@ mod test_support { _cwd: &Path, cx: &mut gpui::App, ) -> Task>> { - let session_id = acp::SessionId::new(self.sessions.lock().len().to_string()); + static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0); + let session_id = + acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string()); let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread = cx.new(|cx| { AcpThread::new( diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 8886b458d623237b74f715d3c1d0def33fbefa7d..08b1b9bdf24d1ff9980164c1af8b3e60bd2f3339 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -149,6 +149,16 @@ impl Diff { } } + pub fn file_path(&self, cx: &App) -> Option { + match self { + Self::Pending(PendingDiff { new_buffer, .. }) => new_buffer + .read(cx) + .file() + .map(|file| file.full_path(cx).to_string_lossy().into_owned()), + Self::Finalized(FinalizedDiff { path, .. }) => Some(path.clone()), + } + } + pub fn multibuffer(&self) -> &Entity { match self { Self::Pending(PendingDiff { multibuffer, .. 
}) => multibuffer, diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 5769d13860f2466f95fe7dd67c1f908812e40c2d..b63eec154a40de8909d13de2a4e1bd3e9d1e06f3 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -254,6 +254,41 @@ impl MentionUri { } } + pub fn tooltip_text(&self) -> Option { + match self { + MentionUri::File { abs_path } | MentionUri::Directory { abs_path } => { + Some(abs_path.to_string_lossy().into_owned().into()) + } + MentionUri::Symbol { + abs_path, + line_range, + .. + } => Some( + format!( + "{}:{}-{}", + abs_path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + MentionUri::Selection { + abs_path: Some(path), + line_range, + .. + } => Some( + format!( + "{}:{}-{}", + path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + _ => None, + } + } + pub fn icon_path(&self, cx: &mut App) -> SharedString { match self { MentionUri::File { abs_path } => { diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml index 8488df691e40ea3bcfc04f4f6f74964fba7863dd..b1a1bf824fb770b8378e596fd0c799a7cf98b13d 100644 --- a/crates/action_log/Cargo.toml +++ b/crates/action_log/Cargo.toml @@ -20,6 +20,7 @@ buffer_diff.workspace = true log.workspace = true clock.workspace = true collections.workspace = true +fs.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 1157d8d6f881ecb33df8104dd4be04bd9d846b5e..5679f3c58fe52057f7a4a0faa24d5b5db2b5e497 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -1,14 +1,20 @@ use anyhow::{Context as _, Result}; use buffer_diff::BufferDiff; use clock; -use collections::BTreeMap; +use collections::{BTreeMap, HashMap}; +use fs::MTime; use futures::{FutureExt, StreamExt, channel::mpsc}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, 
SharedString, Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; -use std::{cmp, ops::Range, sync::Arc}; +use std::{ + cmp, + ops::Range, + path::{Path, PathBuf}, + sync::Arc, +}; use text::{Edit, Patch, Rope}; use util::{RangeExt, ResultExt as _}; @@ -48,8 +54,14 @@ pub struct ActionLog { tracked_buffers: BTreeMap, TrackedBuffer>, /// The project this action log is associated with project: Entity, + /// An action log to forward all public methods to + /// Useful in cases like subagents, where we want to track individual diffs for this subagent, + /// but also want to associate the reads/writes with a parent review experience + linked_action_log: Option>, /// Stores undo information for the most recent reject operation last_reject_undo: Option, + /// Tracks the last time files were read by the agent, to detect external modifications + file_read_times: HashMap, } impl ActionLog { @@ -58,14 +70,47 @@ impl ActionLog { Self { tracked_buffers: BTreeMap::default(), project, + linked_action_log: None, last_reject_undo: None, + file_read_times: HashMap::default(), } } + pub fn with_linked_action_log(mut self, linked_action_log: Entity) -> Self { + self.linked_action_log = Some(linked_action_log); + self + } + pub fn project(&self) -> &Entity { &self.project } + pub fn file_read_time(&self, path: &Path) -> Option { + self.file_read_times.get(path).copied() + } + + fn update_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let Some(local_file) = file.as_local() { + if let Some(mtime) = file.disk_state().mtime() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.insert(abs_path, mtime); + } + } + } + } + + fn remove_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let 
Some(local_file) = file.as_local() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.remove(&abs_path); + } + } + } + fn track_buffer_internal( &mut self, buffer: Entity, @@ -496,16 +541,70 @@ impl ActionLog { /// Track a buffer as read by agent, so we can notify the model about user edits. pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_read_impl(buffer, true, cx); + } + + fn buffer_read_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_read_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } self.track_buffer_internal(buffer, false, cx); } /// Mark a buffer as created by agent, so we can refresh it in the context pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_created_impl(buffer, true, cx); + } + + fn buffer_created_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_created_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } self.track_buffer_internal(buffer, true, cx); } /// Mark a buffer as edited by agent, so we can refresh it in the context pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { + self.buffer_edited_impl(buffer, true, cx); + } + + fn buffer_edited_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share 
read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_edited_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); + } let new_version = buffer.read(cx).version(); let tracked_buffer = self.track_buffer_internal(buffer, false, cx); if let TrackedBufferStatus::Deleted = tracked_buffer.status { @@ -517,6 +616,9 @@ impl ActionLog { } pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { + // Ok to propagate file read time removal to linked action log + self.remove_file_read_time(&buffer, cx); + let has_linked_action_log = self.linked_action_log.is_some(); let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); match tracked_buffer.status { TrackedBufferStatus::Created { .. } => { @@ -524,12 +626,24 @@ impl ActionLog { cx.notify(); } TrackedBufferStatus::Modified => { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); tracked_buffer.status = TrackedBufferStatus::Deleted; - tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + if !has_linked_action_log { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } } + TrackedBufferStatus::Deleted => {} } + + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + } + + if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) { + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } + cx.notify(); } @@ -914,15 +1028,6 @@ impl ActionLog { .collect() } - /// Returns all tracked buffers for debugging purposes - #[cfg(any(test, feature = "test-support"))] - pub fn tracked_buffers_for_debug( - &self, - _cx: &App, - ) -> impl Iterator, &TrackedBuffer)> { - self.tracked_buffers.iter() - } - /// Iterate over buffers changed since last read or edited 
by the model pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { self.tracked_buffers @@ -2634,6 +2739,515 @@ mod tests { assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo())); } + #[gpui::test] + async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + // Neither log considers the buffer stale immediately after reading it. + let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert!(child_stale.is_empty()); + assert!(parent_stale.is_empty()); + + // Simulate a user edit after the agent read the file. + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..5, "goodbye")], None, cx).unwrap(); + }); + }); + cx.run_until_parked(); + + // Both child and parent should see the buffer as stale because both tracked + // it at the pre-edit version via buffer_read forwarding. 
+ let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert_eq!(child_stale, vec![buffer.clone()]); + assert_eq!(parent_stale, vec![buffer]); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx) + .unwrap(); + }); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer, + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(2, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the agent edit" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the agent edit via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), 
json!({})).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/new_file", cx) + }) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx)); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 5), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the created file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the created file via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello\n"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| 
project.open_buffer(file_path.clone(), cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.delete_file(file_path, false, cx)) + .unwrap() + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "hello\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the deleted file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the deleted file via linked log forwarding" + ); + } + + /// Simulates the subagent scenario: two child logs linked to the same parent, each + /// editing a different file. The parent accumulates all edits while each child + /// only sees its own. + #[gpui::test] + async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "file_a": "content of a", + "file_b": "content of b", + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log_1 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + let child_log_2 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_a_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_a", cx) + }) + .unwrap(); + let file_b_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_b", cx) + }) + .unwrap(); + let buffer_a = project + .update(cx, |project, cx| project.open_buffer(file_a_path, cx)) + .await + .unwrap(); + let buffer_b = project 
+ .update(cx, |project, cx| project.open_buffer(file_b_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx)); + buffer_a.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx)); + + child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx)); + buffer_b.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx)); + }); + cx.run_until_parked(); + + let child_1_changed: Vec<_> = cx.read(|cx| { + child_log_1 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let child_2_changed: Vec<_> = cx.read(|cx| { + child_log_2 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let parent_changed: Vec<_> = cx.read(|cx| { + parent_log + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + + assert_eq!( + child_1_changed, + vec![buffer_a.clone()], + "child 1 should only track file_a" + ); + assert_eq!( + child_2_changed, + vec![buffer_b.clone()], + "child 2 should only track file_b" + ); + assert_eq!(parent_changed.len(), 2, "parent should track both files"); + assert!( + parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b), + "parent should contain both buffer_a and buffer_b" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, 
|project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_read" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_edited" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_edited" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "existing content"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = 
project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_created" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_created" + ); + } + + #[gpui::test] + async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should exist after buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be removed after will_delete_buffer" + ); + } + + #[gpui::test] + async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + 
fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "child should record file_read_time on buffer_read" + ); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_read" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_edited" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_created" + ); + } + #[derive(Debug, PartialEq)] struct HunkStatus { range: Range, diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 759c6e3b9c8c228a6ae6bea5330819b97200b603..a93c2d2062b7472f8ed94a6ea0947a685edd204f 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -14,6 +14,7 @@ mod tools; use context_server::ContextServerId; pub use db::*; +use itertools::Itertools; pub use native_agent_server::NativeAgentServer; pub use 
pattern_extraction::*; pub use shell_command_parser::extract_commands; @@ -51,6 +52,7 @@ use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::Arc; use util::ResultExt; +use util::path_list::PathList; use util::rel_path::RelPath; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -349,11 +351,14 @@ impl NativeAgent { let session_id = thread.id().clone(); let parent_session_id = thread.parent_thread_id(); let title = thread.title(); + let draft_prompt = thread.draft_prompt().map(Vec::from); + let scroll_position = thread.ui_scroll_position(); + let token_usage = thread.latest_token_usage(); let project = thread.project.clone(); let action_log = thread.action_log.clone(); let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); let acp_thread = cx.new(|cx| { - acp_thread::AcpThread::new( + let mut acp_thread = acp_thread::AcpThread::new( parent_session_id, title, connection, @@ -362,18 +367,24 @@ impl NativeAgent { session_id.clone(), prompt_capabilities_rx, cx, - ) + ); + acp_thread.set_draft_prompt(draft_prompt); + acp_thread.set_ui_scroll_position(scroll_position); + acp_thread.update_token_usage(token_usage, cx); + acp_thread }); let registry = LanguageModelRegistry::read_global(cx); let summarization_model = registry.thread_summary_model().map(|c| c.model); let weak = cx.weak_entity(); + let weak_thread = thread_handle.downgrade(); thread_handle.update(cx, |thread, cx| { thread.set_summarization_model(summarization_model, cx); thread.add_default_tools( Rc::new(NativeThreadEnvironment { acp_thread: acp_thread.downgrade(), + thread: weak_thread, agent: weak, }) as _, cx, @@ -840,19 +851,36 @@ impl NativeAgent { return; } - let database_future = ThreadsDatabase::connect(cx); - let (id, db_thread) = - thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); + let id = thread.read(cx).id().clone(); let Some(session) = self.sessions.get_mut(&id) else { return; }; + + let folder_paths = PathList::new( + &self + .project + 
.read(cx) + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().to_path_buf()) + .collect::>(), + ); + + let draft_prompt = session.acp_thread.read(cx).draft_prompt().map(Vec::from); + let database_future = ThreadsDatabase::connect(cx); + let db_thread = thread.update(cx, |thread, cx| { + thread.set_draft_prompt(draft_prompt); + thread.to_db(cx) + }); let thread_store = self.thread_store.clone(); session.pending_save = cx.spawn(async move |_, cx| { let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { return; }; let db_thread = db_thread.await; - database.save_thread(id, db_thread).await.log_err(); + database + .save_thread(id, db_thread, folder_paths) + .await + .log_err(); thread_store.update(cx, |store, cx| store.reload(cx)); }); } @@ -1462,16 +1490,6 @@ impl NativeAgentSessionList { } } - fn to_session_info(entry: DbThreadMetadata) -> AgentSessionInfo { - AgentSessionInfo { - session_id: entry.id, - cwd: None, - title: Some(entry.title), - updated_at: Some(entry.updated_at), - meta: None, - } - } - pub fn thread_store(&self) -> &Entity { &self.thread_store } @@ -1487,7 +1505,7 @@ impl AgentSessionList for NativeAgentSessionList { .thread_store .read(cx) .entries() - .map(Self::to_session_info) + .map(|entry| AgentSessionInfo::from(&entry)) .collect(); Task::ready(Ok(AgentSessionListResponse::new(sessions))) } @@ -1576,17 +1594,19 @@ impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle { pub struct NativeThreadEnvironment { agent: WeakEntity, + thread: WeakEntity, acp_thread: WeakEntity, } impl NativeThreadEnvironment { pub(crate) fn create_subagent_thread( - agent: WeakEntity, - parent_thread_entity: Entity, + &self, label: String, - initial_prompt: String, cx: &mut App, ) -> Result> { + let Some(parent_thread_entity) = self.thread.upgrade() else { + anyhow::bail!("Parent thread no longer exists".to_string()); + }; let parent_thread = parent_thread_entity.read(cx); let current_depth = 
parent_thread.depth(); @@ -1605,28 +1625,29 @@ impl NativeThreadEnvironment { let session_id = subagent_thread.read(cx).id().clone(); - let acp_thread = agent.update(cx, |agent, cx| { + let acp_thread = self.agent.update(cx, |agent, cx| { agent.register_session(subagent_thread.clone(), cx) })?; - Self::prompt_subagent( - session_id, - subagent_thread, - acp_thread, - parent_thread_entity, - initial_prompt, - cx, - ) + let depth = current_depth + 1; + + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = false, + ); + + self.prompt_subagent(session_id, subagent_thread, acp_thread) } pub(crate) fn resume_subagent_thread( - agent: WeakEntity, - parent_thread_entity: Entity, + &self, session_id: acp::SessionId, - follow_up_prompt: String, cx: &mut App, ) -> Result> { - let (subagent_thread, acp_thread) = agent.update(cx, |agent, _cx| { + let (subagent_thread, acp_thread) = self.agent.update(cx, |agent, _cx| { let session = agent .sessions .get(&session_id) @@ -1634,31 +1655,35 @@ impl NativeThreadEnvironment { anyhow::Ok((session.thread.clone(), session.acp_thread.clone())) })??; - Self::prompt_subagent( - session_id, - subagent_thread, - acp_thread, - parent_thread_entity, - follow_up_prompt, - cx, - ) + let depth = subagent_thread.read(cx).depth(); + + if let Some(parent_thread_entity) = self.thread.upgrade() { + telemetry::event!( + "Subagent Started", + session = parent_thread_entity.read(cx).id().to_string(), + subagent_session = session_id.to_string(), + depth, + is_resumed = true, + ); + } + + self.prompt_subagent(session_id, subagent_thread, acp_thread) } fn prompt_subagent( + &self, session_id: acp::SessionId, subagent_thread: Entity, acp_thread: Entity, - parent_thread_entity: Entity, - prompt: String, - cx: &mut App, ) -> Result> { + let Some(parent_thread_entity) = self.thread.upgrade() else { + anyhow::bail!("Parent thread no longer 
exists".to_string()); + }; Ok(Rc::new(NativeSubagentHandle::new( session_id, subagent_thread, acp_thread, parent_thread_entity, - prompt, - cx, )) as _) } } @@ -1697,36 +1722,16 @@ impl ThreadEnvironment for NativeThreadEnvironment { }) } - fn create_subagent( - &self, - parent_thread_entity: Entity, - label: String, - initial_prompt: String, - cx: &mut App, - ) -> Result> { - Self::create_subagent_thread( - self.agent.clone(), - parent_thread_entity, - label, - initial_prompt, - cx, - ) + fn create_subagent(&self, label: String, cx: &mut App) -> Result> { + self.create_subagent_thread(label, cx) } fn resume_subagent( &self, - parent_thread_entity: Entity, session_id: acp::SessionId, - follow_up_prompt: String, cx: &mut App, ) -> Result> { - Self::resume_subagent_thread( - self.agent.clone(), - parent_thread_entity, - session_id, - follow_up_prompt, - cx, - ) + self.resume_subagent_thread(session_id, cx) } } @@ -1742,8 +1747,7 @@ pub struct NativeSubagentHandle { session_id: acp::SessionId, parent_thread: WeakEntity, subagent_thread: Entity, - wait_for_prompt_to_complete: Shared>, - _subscription: Subscription, + acp_thread: Entity, } impl NativeSubagentHandle { @@ -1752,71 +1756,12 @@ impl NativeSubagentHandle { subagent_thread: Entity, acp_thread: Entity, parent_thread_entity: Entity, - prompt: String, - cx: &mut App, ) -> Self { - let ratio_before_prompt = subagent_thread - .read(cx) - .latest_token_usage() - .map(|usage| usage.ratio()); - - parent_thread_entity.update(cx, |parent_thread, _cx| { - parent_thread.register_running_subagent(subagent_thread.downgrade()) - }); - - let task = acp_thread.update(cx, |acp_thread, cx| { - acp_thread.send(vec![prompt.into()], cx) - }); - - let (token_limit_tx, token_limit_rx) = oneshot::channel::<()>(); - let mut token_limit_tx = Some(token_limit_tx); - - let subscription = cx.subscribe( - &subagent_thread, - move |_thread, event: &TokenUsageUpdated, _cx| { - if let Some(usage) = &event.0 { - let old_ratio = 
ratio_before_prompt - .clone() - .unwrap_or(TokenUsageRatio::Normal); - let new_ratio = usage.ratio(); - if old_ratio == TokenUsageRatio::Normal && new_ratio == TokenUsageRatio::Warning - { - if let Some(tx) = token_limit_tx.take() { - tx.send(()).ok(); - } - } - } - }, - ); - - let wait_for_prompt_to_complete = cx - .background_spawn(async move { - futures::select! { - response = task.fuse() => match response { - Ok(Some(response)) =>{ - match response.stop_reason { - acp::StopReason::Cancelled => SubagentPromptResult::Cancelled, - acp::StopReason::MaxTokens => SubagentPromptResult::Error("The agent reached the maximum number of tokens.".into()), - acp::StopReason::MaxTurnRequests => SubagentPromptResult::Error("The agent reached the maximum number of allowed requests between user turns. Try prompting again.".into()), - acp::StopReason::Refusal => SubagentPromptResult::Error("The agent refused to process that prompt. Try again.".into()), - acp::StopReason::EndTurn | _ => SubagentPromptResult::Completed, - } - - } - Ok(None) => SubagentPromptResult::Error("No response from the agent. 
You can try messaging again.".into()), - Err(error) => SubagentPromptResult::Error(error.to_string()), - }, - _ = token_limit_rx.fuse() => SubagentPromptResult::ContextWindowWarning, - } - }) - .shared(); - NativeSubagentHandle { session_id, subagent_thread, parent_thread: parent_thread_entity.downgrade(), - wait_for_prompt_to_complete, - _subscription: subscription, + acp_thread, } } } @@ -1826,22 +1771,100 @@ impl SubagentHandle for NativeSubagentHandle { self.session_id.clone() } - fn wait_for_output(&self, cx: &AsyncApp) -> Task> { - let thread = self.subagent_thread.clone(); - let wait_for_prompt = self.wait_for_prompt_to_complete.clone(); + fn num_entries(&self, cx: &App) -> usize { + self.acp_thread.read(cx).entries().len() + } + fn send(&self, message: String, cx: &AsyncApp) -> Task> { + let thread = self.subagent_thread.clone(); + let acp_thread = self.acp_thread.clone(); let subagent_session_id = self.session_id.clone(); let parent_thread = self.parent_thread.clone(); cx.spawn(async move |cx| { - let result = match wait_for_prompt.await { + let (task, _subscription) = cx.update(|cx| { + let ratio_before_prompt = thread + .read(cx) + .latest_token_usage() + .map(|usage| usage.ratio()); + + parent_thread + .update(cx, |parent_thread, _cx| { + parent_thread.register_running_subagent(thread.downgrade()) + }) + .ok(); + + let task = acp_thread.update(cx, |acp_thread, cx| { + acp_thread.send(vec![message.into()], cx) + }); + + let (token_limit_tx, token_limit_rx) = oneshot::channel::<()>(); + let mut token_limit_tx = Some(token_limit_tx); + + let subscription = cx.subscribe( + &thread, + move |_thread, event: &TokenUsageUpdated, _cx| { + if let Some(usage) = &event.0 { + let old_ratio = ratio_before_prompt + .clone() + .unwrap_or(TokenUsageRatio::Normal); + let new_ratio = usage.ratio(); + if old_ratio == TokenUsageRatio::Normal + && new_ratio == TokenUsageRatio::Warning + { + if let Some(tx) = token_limit_tx.take() { + tx.send(()).ok(); + } + } + } + }, + ); + 
+ let wait_for_prompt = cx + .background_spawn(async move { + futures::select! { + response = task.fuse() => match response { + Ok(Some(response)) => { + match response.stop_reason { + acp::StopReason::Cancelled => SubagentPromptResult::Cancelled, + acp::StopReason::MaxTokens => SubagentPromptResult::Error("The agent reached the maximum number of tokens.".into()), + acp::StopReason::MaxTurnRequests => SubagentPromptResult::Error("The agent reached the maximum number of allowed requests between user turns. Try prompting again.".into()), + acp::StopReason::Refusal => SubagentPromptResult::Error("The agent refused to process that prompt. Try again.".into()), + acp::StopReason::EndTurn | _ => SubagentPromptResult::Completed, + } + } + Ok(None) => SubagentPromptResult::Error("No response from the agent. You can try messaging again.".into()), + Err(error) => SubagentPromptResult::Error(error.to_string()), + }, + _ = token_limit_rx.fuse() => SubagentPromptResult::ContextWindowWarning, + } + }); + + (wait_for_prompt, subscription) + }); + + let result = match task.await { SubagentPromptResult::Completed => thread.read_with(cx, |thread, _cx| { thread .last_message() - .map(|m| m.to_markdown()) + .and_then(|message| { + let content = message.as_agent_message()? 
+ .content + .iter() + .filter_map(|c| match c { + AgentMessageContent::Text(text) => Some(text.as_str()), + _ => None, + }) + .join("\n\n"); + if content.is_empty() { + None + } else { + Some( content) + } + }) .context("No response from subagent") }), - SubagentPromptResult::Cancelled => Err(anyhow!("User cancelled")), + SubagentPromptResult::Cancelled => Err(anyhow!("User canceled")), SubagentPromptResult::Error(message) => Err(anyhow!("{message}")), SubagentPromptResult::ContextWindowWarning => { thread.update(cx, |thread, cx| thread.cancel(cx)).await; @@ -1910,7 +1933,9 @@ mod internal_tests { use gpui::TestAppContext; use indoc::formatdoc; use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider}; - use language_model::{LanguageModelProviderId, LanguageModelProviderName}; + use language_model::{ + LanguageModelCompletionEvent, LanguageModelProviderId, LanguageModelProviderName, + }; use serde_json::json; use settings::SettingsStore; use util::{path, rel_path::rel_path}; @@ -2542,6 +2567,13 @@ mod internal_tests { cx.run_until_parked(); model.send_last_completion_stream_text_chunk("Lorem."); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 150, + output_tokens: 75, + ..Default::default() + }, + )); model.end_last_completion_stream(); cx.run_until_parked(); summary_model @@ -2571,6 +2603,24 @@ mod internal_tests { cx.run_until_parked(); + // Set a draft prompt with rich content blocks before saving. 
+ let draft_blocks = vec![ + acp::ContentBlock::Text(acp::TextContent::new("Check out ")), + acp::ContentBlock::ResourceLink(acp::ResourceLink::new("b.md", uri.to_string())), + acp::ContentBlock::Text(acp::TextContent::new(" please")), + ]; + acp_thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(Some(draft_blocks.clone())); + }); + thread.update(cx, |thread, _cx| { + thread.set_ui_scroll_position(Some(gpui::ListOffset { + item_ix: 5, + offset_in_item: gpui::px(12.5), + })); + }); + thread.update(cx, |_thread, cx| cx.notify()); + cx.run_until_parked(); + // Close the session so it can be reloaded from disk. cx.update(|cx| connection.clone().close_session(&session_id, cx)) .await @@ -2608,6 +2658,29 @@ mod internal_tests { "} ) }); + + // Ensure the draft prompt with rich content blocks survived the round-trip. + acp_thread.read_with(cx, |thread, _| { + assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice())); + }); + + // Ensure token usage survived the round-trip. + acp_thread.read_with(cx, |thread, _| { + let usage = thread + .token_usage() + .expect("token usage should be restored after reload"); + assert_eq!(usage.input_tokens, 150); + assert_eq!(usage.output_tokens, 75); + }); + + // Ensure scroll position survived the round-trip. 
+ acp_thread.read_with(cx, |thread, _| { + let scroll = thread + .ui_scroll_position() + .expect("scroll position should be restored after reload"); + assert_eq!(scroll.item_ix, 5); + assert_eq!(scroll.offset_in_item, gpui::px(12.5)); + }); } fn thread_entries( diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 14ec9bb9af92c2f9720af5714c7344b986f5f7b5..2c9b33e4efc4f22059e2914589ca6c635b51c0e5 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -8,6 +8,7 @@ use collections::{HashMap, IndexMap}; use futures::{FutureExt, future::Shared}; use gpui::{BackgroundExecutor, Global, Task}; use indoc::indoc; +use language_model::Speed; use parking_lot::Mutex; use serde::{Deserialize, Serialize}; use sqlez::{ @@ -17,23 +18,13 @@ use sqlez::{ }; use std::sync::Arc; use ui::{App, SharedString}; +use util::path_list::PathList; use zed_env_vars::ZED_STATELESS; pub type DbMessage = crate::Message; pub type DbSummary = crate::legacy_thread::DetailedSummaryState; pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; -/// Metadata about the git worktree associated with an agent thread. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AgentGitWorktreeInfo { - /// The branch name in the git worktree. - pub branch: String, - /// Absolute path to the git worktree on disk. - pub worktree_path: std::path::PathBuf, - /// The base branch/commit the worktree was created from. - pub base_ref: String, -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DbThreadMetadata { pub id: acp::SessionId, @@ -41,10 +32,22 @@ pub struct DbThreadMetadata { #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, - /// Denormalized from `DbThread::git_worktree_info.branch` for efficient - /// listing without decompressing thread data. The blob is the source of - /// truth; this column is populated on save for query convenience. 
- pub worktree_branch: Option, + pub created_at: Option>, + /// The workspace folder paths this thread was created against, sorted + /// lexicographically. Used for grouping threads by project in the sidebar. + pub folder_paths: PathList, +} + +impl From<&DbThreadMetadata> for acp_thread::AgentSessionInfo { + fn from(meta: &DbThreadMetadata) -> Self { + Self { + session_id: meta.id.clone(), + cwd: None, + title: Some(meta.title.clone()), + updated_at: Some(meta.updated_at), + meta: None, + } + } } #[derive(Debug, Serialize, Deserialize)] @@ -69,7 +72,21 @@ pub struct DbThread { #[serde(default)] pub subagent_context: Option, #[serde(default)] - pub git_worktree_info: Option, + pub speed: Option, + #[serde(default)] + pub thinking_enabled: bool, + #[serde(default)] + pub thinking_effort: Option, + #[serde(default)] + pub draft_prompt: Option>, + #[serde(default)] + pub ui_scroll_position: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub struct SerializedScrollPosition { + pub item_ix: usize, + pub offset_in_item: f32, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -108,7 +125,11 @@ impl SharedThread { profile: None, imported: true, subagent_context: None, - git_worktree_info: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } @@ -283,7 +304,11 @@ impl DbThread { profile: thread.profile, imported: false, subagent_context: None, - git_worktree_info: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, }) } } @@ -389,12 +414,24 @@ impl ThreadsDatabase { } if let Ok(mut s) = connection.exec(indoc! {" - ALTER TABLE threads ADD COLUMN worktree_branch TEXT + ALTER TABLE threads ADD COLUMN folder_paths TEXT; + ALTER TABLE threads ADD COLUMN folder_paths_order TEXT; "}) { s().ok(); } + if let Ok(mut s) = connection.exec(indoc! 
{" + ALTER TABLE threads ADD COLUMN created_at TEXT; + "}) + { + if s().is_ok() { + connection.exec(indoc! {" + UPDATE threads SET created_at = updated_at WHERE created_at IS NULL + "})?()?; + } + } + let db = Self { executor, connection: Arc::new(Mutex::new(connection)), @@ -407,6 +444,7 @@ impl ThreadsDatabase { connection: &Arc>, id: acp::SessionId, thread: DbThread, + folder_paths: &PathList, ) -> Result<()> { const COMPRESSION_LEVEL: i32 = 3; @@ -423,10 +461,16 @@ impl ThreadsDatabase { .subagent_context .as_ref() .map(|ctx| ctx.parent_thread_id.0.clone()); - let worktree_branch = thread - .git_worktree_info - .as_ref() - .map(|info| info.branch.clone()); + let serialized_folder_paths = folder_paths.serialize(); + let (folder_paths_str, folder_paths_order_str): (Option, Option) = + if folder_paths.is_empty() { + (None, None) + } else { + ( + Some(serialized_folder_paths.paths), + Some(serialized_folder_paths.order), + ) + }; let json_data = serde_json::to_string(&SerializedThread { thread, version: DbThread::VERSION, @@ -438,18 +482,31 @@ impl ThreadsDatabase { let data_type = DataType::Zstd; let data = compressed; - let mut insert = connection.exec_bound::<(Arc, Option>, Option, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, parent_id, worktree_branch, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?) + let created_at = Utc::now().to_rfc3339(); + + let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec, String)>(indoc! 
{" + INSERT INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data, created_at) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9) + ON CONFLICT(id) DO UPDATE SET + parent_id = excluded.parent_id, + folder_paths = excluded.folder_paths, + folder_paths_order = excluded.folder_paths_order, + summary = excluded.summary, + updated_at = excluded.updated_at, + data_type = excluded.data_type, + data = excluded.data "})?; insert(( id.0, parent_id, - worktree_branch, + folder_paths_str, + folder_paths_order_str, title, updated_at, data_type, data, + created_at, ))?; Ok(()) @@ -462,20 +519,35 @@ impl ThreadsDatabase { let connection = connection.lock(); let mut select = connection - .select_bound::<(), (Arc, Option>, Option, String, String)>(indoc! {" - SELECT id, parent_id, worktree_branch, summary, updated_at FROM threads ORDER BY updated_at DESC + .select_bound::<(), (Arc, Option>, Option, Option, String, String, Option)>(indoc! {" + SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at FROM threads ORDER BY updated_at DESC, created_at DESC "})?; let rows = select(())?; let mut threads = Vec::new(); - for (id, parent_id, worktree_branch, summary, updated_at) in rows { + for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, created_at) in rows { + let folder_paths = folder_paths + .map(|paths| { + PathList::deserialize(&util::path_list::SerializedPathList { + paths, + order: folder_paths_order.unwrap_or_default(), + }) + }) + .unwrap_or_default(); + let created_at = created_at + .as_deref() + .map(DateTime::parse_from_rfc3339) + .transpose()? 
+ .map(|dt| dt.with_timezone(&Utc)); + threads.push(DbThreadMetadata { id: acp::SessionId::new(id), parent_session_id: parent_id.map(acp::SessionId::new), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), - worktree_branch, + created_at, + folder_paths, }); } @@ -509,11 +581,16 @@ impl ThreadsDatabase { }) } - pub fn save_thread(&self, id: acp::SessionId, thread: DbThread) -> Task> { + pub fn save_thread( + &self, + id: acp::SessionId, + thread: DbThread, + folder_paths: PathList, + ) -> Task> { let connection = self.connection.clone(); self.executor - .spawn(async move { Self::save_thread_sync(&connection, id, thread) }) + .spawn(async move { Self::save_thread_sync(&connection, id, thread, &folder_paths) }) } pub fn delete_thread(&self, id: acp::SessionId) -> Task> { @@ -609,12 +686,16 @@ mod tests { profile: None, imported: false, subagent_context: None, - git_worktree_info: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } #[gpui::test] - async fn test_list_threads_orders_by_updated_at(cx: &mut TestAppContext) { + async fn test_list_threads_orders_by_created_at(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); let older_id = session_id("thread-a"); @@ -630,11 +711,11 @@ mod tests { ); database - .save_thread(older_id.clone(), older_thread) + .save_thread(older_id.clone(), older_thread, PathList::default()) .await .unwrap(); database - .save_thread(newer_id.clone(), newer_thread) + .save_thread(newer_id.clone(), newer_thread, PathList::default()) .await .unwrap(); @@ -659,11 +740,11 @@ mod tests { ); database - .save_thread(thread_id.clone(), original_thread) + .save_thread(thread_id.clone(), original_thread, PathList::default()) .await .unwrap(); database - .save_thread(thread_id.clone(), updated_thread) + .save_thread(thread_id.clone(), updated_thread, PathList::default()) .await .unwrap(); @@ -675,6 
+756,10 @@ mod tests { entries[0].updated_at, Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap() ); + assert!( + entries[0].created_at.is_some(), + "created_at should be populated" + ); } #[test] @@ -693,6 +778,22 @@ mod tests { ); } + #[test] + fn test_draft_prompt_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + "messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.draft_prompt.is_none(), + "Legacy threads without draft_prompt field should default to None" + ); + } + #[gpui::test] async fn test_subagent_context_roundtrips_through_save_load(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); @@ -710,7 +811,7 @@ mod tests { }); database - .save_thread(child_id.clone(), child_thread) + .save_thread(child_id.clone(), child_thread, PathList::default()) .await .unwrap(); @@ -738,7 +839,7 @@ mod tests { ); database - .save_thread(thread_id.clone(), thread) + .save_thread(thread_id.clone(), thread, PathList::default()) .await .unwrap(); @@ -755,92 +856,96 @@ mod tests { } #[gpui::test] - async fn test_git_worktree_info_roundtrip(cx: &mut TestAppContext) { + async fn test_folder_paths_roundtrip(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); - let thread_id = session_id("worktree-thread"); - let mut thread = make_thread( - "Worktree Thread", + let thread_id = session_id("folder-thread"); + let thread = make_thread( + "Folder Thread", Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), ); - thread.git_worktree_info = Some(AgentGitWorktreeInfo { - branch: "zed/agent/a4Xiu".to_string(), - worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu"), - base_ref: "main".to_string(), - }); + + let folder_paths = PathList::new(&[ + std::path::PathBuf::from("/home/user/project-a"), + std::path::PathBuf::from("/home/user/project-b"), + ]); database - 
.save_thread(thread_id.clone(), thread) + .save_thread(thread_id.clone(), thread, folder_paths.clone()) .await .unwrap(); - let loaded = database - .load_thread(thread_id) - .await - .unwrap() - .expect("thread should exist"); - - let info = loaded - .git_worktree_info - .expect("git_worktree_info should be restored"); - assert_eq!(info.branch, "zed/agent/a4Xiu"); - assert_eq!( - info.worktree_path, - std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu") - ); - assert_eq!(info.base_ref, "main"); + let threads = database.list_threads().await.unwrap(); + assert_eq!(threads.len(), 1); + assert_eq!(threads[0].folder_paths, folder_paths); } #[gpui::test] - async fn test_session_list_includes_worktree_meta(cx: &mut TestAppContext) { + async fn test_folder_paths_empty_when_not_set(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); - // Save a thread with worktree info - let worktree_id = session_id("wt-thread"); - let mut worktree_thread = make_thread( - "With Worktree", + let thread_id = session_id("no-folder-thread"); + let thread = make_thread( + "No Folder Thread", Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), ); - worktree_thread.git_worktree_info = Some(AgentGitWorktreeInfo { - branch: "zed/agent/bR9kz".to_string(), - worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/bR9kz"), - base_ref: "develop".to_string(), - }); database - .save_thread(worktree_id.clone(), worktree_thread) + .save_thread(thread_id.clone(), thread, PathList::default()) .await .unwrap(); - // Save a thread without worktree info - let plain_id = session_id("plain-thread"); - let plain_thread = make_thread( - "Without Worktree", - Utc.with_ymd_and_hms(2024, 6, 15, 11, 0, 0).unwrap(), + let threads = database.list_threads().await.unwrap(); + assert_eq!(threads.len(), 1); + assert!(threads[0].folder_paths.is_empty()); + } + + #[test] + fn test_scroll_position_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + 
"messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.ui_scroll_position.is_none(), + "Legacy threads without scroll_position field should default to None" + ); + } + + #[gpui::test] + async fn test_scroll_position_roundtrips_through_save_load(cx: &mut TestAppContext) { + let database = ThreadsDatabase::new(cx.executor()).unwrap(); + + let thread_id = session_id("thread-with-scroll"); + + let mut thread = make_thread( + "Thread With Scroll", + Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), ); + thread.ui_scroll_position = Some(SerializedScrollPosition { + item_ix: 42, + offset_in_item: 13.5, + }); database - .save_thread(plain_id.clone(), plain_thread) + .save_thread(thread_id.clone(), thread, PathList::default()) .await .unwrap(); - // List threads and verify worktree_branch is populated correctly - let threads = database.list_threads().await.unwrap(); - assert_eq!(threads.len(), 2); - - let wt_entry = threads - .iter() - .find(|t| t.id == worktree_id) - .expect("should find worktree thread"); - assert_eq!(wt_entry.worktree_branch.as_deref(), Some("zed/agent/bR9kz")); - - let plain_entry = threads - .iter() - .find(|t| t.id == plain_id) - .expect("should find plain thread"); - assert!( - plain_entry.worktree_branch.is_none(), - "plain thread should have no worktree_branch" - ); + let loaded = database + .load_thread(thread_id) + .await + .unwrap() + .expect("thread should exist"); + + let scroll = loaded + .ui_scroll_position + .expect("scroll_position should be restored"); + assert_eq!(scroll.item_ix, 42); + assert!((scroll.offset_in_item - 13.5).abs() < f32::EPSILON); } } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 3e67cba1b63f4136a03b88c3007aee99489a6e80..e122d6b2884a593daa819457835d3d00690f5a7d 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -2,6 +2,7 @@ mod 
create_file_parser; mod edit_parser; #[cfg(test)] mod evals; +pub mod reindent; pub mod streaming_fuzzy_matcher; use crate::{Template, Templates}; @@ -24,9 +25,10 @@ use language_model::{ LanguageModelToolChoice, MessageContent, Role, }; use project::{AgentLocation, Project}; +use reindent::{IndentDelta, Reindenter}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; +use std::{mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; use streaming_diff::{CharOperation, StreamingDiff}; use streaming_fuzzy_matcher::StreamingFuzzyMatcher; @@ -82,6 +84,7 @@ pub struct EditAgent { templates: Arc, edit_format: EditFormat, thinking_allowed: bool, + update_agent_location: bool, } impl EditAgent { @@ -92,6 +95,7 @@ impl EditAgent { templates: Arc, edit_format: EditFormat, allow_thinking: bool, + update_agent_location: bool, ) -> Self { EditAgent { model, @@ -100,6 +104,7 @@ impl EditAgent { templates, edit_format, thinking_allowed: allow_thinking, + update_agent_location, } } @@ -166,56 +171,73 @@ impl EditAgent { output_events_tx: mpsc::UnboundedSender, cx: &mut AsyncApp, ) -> Result<()> { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); - self.action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer(buffer.read(cx).remote_id()), - }), - cx, - ) - }); + let buffer_id = cx.update(|cx| { + let buffer_id = buffer.read(cx).remote_id(); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::min_for_buffer(buffer_id), + }), + cx, + ) + }); + } + buffer_id + }); + + let send_edit_event = || { output_events_tx 
.unbounded_send(EditAgentOutputEvent::Edited( - Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), + Anchor::min_max_range_for_buffer(buffer_id), )) - .ok(); - }); - + .ok() + }; + let set_agent_location = |cx: &mut _| { + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::max_for_buffer(buffer_id), + }), + cx, + ) + }) + } + }; + let mut first_chunk = true; while let Some(event) = parse_rx.next().await { match event? { CreateFileParserEvent::NewTextChunk { chunk } => { - let buffer_id = cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.append(chunk, cx)); + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + if mem::take(&mut first_chunk) { + buffer.set_text(chunk, cx) + } else { + buffer.append(chunk, cx) + } + }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - buffer.read(cx).remote_id(), - ), - }), - cx, - ) - }); - buffer.read(cx).remote_id() + set_agent_location(cx); }); - output_events_tx - .unbounded_send(EditAgentOutputEvent::Edited( - Anchor::min_max_range_for_buffer(buffer_id), - )) - .ok(); + send_edit_event(); } } } + if first_chunk { + cx.update(|cx| { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + self.action_log + .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + set_agent_location(cx); + }); + send_edit_event(); + } + Ok(()) } @@ -287,15 +309,17 @@ impl EditAgent { if let Some(old_range) = old_range { let old_range = snapshot.anchor_before(old_range.start) ..snapshot.anchor_before(old_range.end); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: old_range.end, - }), - cx, - ); - 
}); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: old_range.end, + }), + cx, + ); + }); + } output_events .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range)) .ok(); @@ -368,15 +392,17 @@ impl EditAgent { }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: max_edit_end, - }), - cx, - ); - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: max_edit_end, + }), + cx, + ); + }); + } (min_edit_start, max_edit_end) }); output_events @@ -540,15 +566,8 @@ impl EditAgent { let compute_edits = cx.background_spawn(async move { let buffer_start_indent = snapshot .line_indent_for_row(snapshot.offset_to_point(resolved_old_text.range.start).row); - let indent_delta = if buffer_start_indent.tabs > 0 { - IndentDelta::Tabs( - buffer_start_indent.tabs as isize - resolved_old_text.indent.tabs as isize, - ) - } else { - IndentDelta::Spaces( - buffer_start_indent.spaces as isize - resolved_old_text.indent.spaces as isize, - ) - }; + let indent_delta = + reindent::compute_indent_delta(buffer_start_indent, resolved_old_text.indent); let old_text = snapshot .text_for_range(resolved_old_text.range.clone()) @@ -595,8 +614,7 @@ impl EditAgent { delta: IndentDelta, mut stream: impl Unpin + Stream>, ) -> impl Stream> { - let mut buffer = String::new(); - let mut in_leading_whitespace = true; + let mut reindenter = Reindenter::new(delta); let mut done = false; futures::stream::poll_fn(move |cx| { while !done { @@ -609,55 +627,10 @@ impl EditAgent { _ => return Poll::Ready(None), }; - buffer.push_str(&chunk); - - let mut indented_new_text = String::new(); - let mut 
start_ix = 0; - let mut newlines = buffer.match_indices('\n').peekable(); - loop { - let (line_end, is_pending_line) = match newlines.next() { - Some((ix, _)) => (ix, false), - None => (buffer.len(), true), - }; - let line = &buffer[start_ix..line_end]; - - if in_leading_whitespace { - if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) { - // We found a non-whitespace character, adjust - // indentation based on the delta. - let new_indent_len = - cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize; - indented_new_text - .extend(iter::repeat(delta.character()).take(new_indent_len)); - indented_new_text.push_str(&line[non_whitespace_ix..]); - in_leading_whitespace = false; - } else if is_pending_line { - // We're still in leading whitespace and this line is incomplete. - // Stop processing until we receive more input. - break; - } else { - // This line is entirely whitespace. Push it without indentation. - indented_new_text.push_str(line); - } - } else { - indented_new_text.push_str(line); - } - - if is_pending_line { - start_ix = line_end; - break; - } else { - in_leading_whitespace = true; - indented_new_text.push('\n'); - start_ix = line_end + 1; - } - } - buffer.replace_range(..start_ix, ""); - + let mut indented_new_text = reindenter.push(&chunk); // This was the last chunk, push all the buffered content as-is. 
if is_last_chunk { - indented_new_text.push_str(&buffer); - buffer.clear(); + indented_new_text.push_str(&reindenter.finish()); done = true; } @@ -736,6 +709,7 @@ impl EditAgent { temperature: None, thinking_allowed: self.thinking_allowed, thinking_effort: None, + speed: None, }; Ok(self.model.stream_completion_text(request, cx).await?.stream) @@ -747,28 +721,6 @@ struct ResolvedOldText { indent: LineIndent, } -#[derive(Copy, Clone, Debug)] -enum IndentDelta { - Spaces(isize), - Tabs(isize), -} - -impl IndentDelta { - fn character(&self) -> char { - match self { - IndentDelta::Spaces(_) => ' ', - IndentDelta::Tabs(_) => '\t', - } - } - - fn len(&self) -> isize { - match self { - IndentDelta::Spaces(n) => *n, - IndentDelta::Tabs(n) => *n, - } - } -} - #[cfg(test)] mod tests { use super::*; @@ -1194,19 +1146,16 @@ mod tests { ); cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - [EditAgentOutputEvent::Edited(_)] - ); + assert_eq!(drain_events(&mut events).as_slice(), []); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "" + "abc\ndef\nghi" ); assert_eq!( project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( + position: language::Anchor::min_for_buffer( cx.update(|cx| buffer.read(cx).remote_id()) ), }) @@ -1290,6 +1239,32 @@ mod tests { ); } + #[gpui::test] + async fn test_overwrite_no_content(cx: &mut TestAppContext) { + let agent = init_test(cx).await; + let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); + let (chunks_tx, chunks_rx) = mpsc::unbounded::<&str>(); + let (apply, mut events) = agent.overwrite_with_chunks( + buffer.clone(), + chunks_rx.map(|chunk| Ok(chunk.to_string())), + &mut cx.to_async(), + ); + + drop(chunks_tx); + cx.run_until_parked(); + + let result = apply.await; + assert!(result.is_ok(),); + assert_matches!( + drain_events(&mut events).as_slice(), + 
[EditAgentOutputEvent::Edited { .. }] + ); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), + "" + ); + } + #[gpui::test(iterations = 100)] async fn test_indent_new_text_chunks(mut rng: StdRng) { let chunks = to_random_chunks(&mut rng, " abc\n def\n ghi"); @@ -1426,6 +1401,7 @@ mod tests { Templates::new(), EditFormat::XmlTags, thinking_allowed, + true, ) } diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index cdf6c1c0b3f6440e4827c8b74b47a32d997b092f..2e8818b101995b374cf8172547c45b55c27c6f26 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1469,6 +1469,7 @@ impl EditAgentTest { Templates::new(), edit_format, true, + true, ), project, judge_model, diff --git a/crates/agent/src/edit_agent/reindent.rs b/crates/agent/src/edit_agent/reindent.rs new file mode 100644 index 0000000000000000000000000000000000000000..7f08749e475f6acfcf63013abd9139574112e4b5 --- /dev/null +++ b/crates/agent/src/edit_agent/reindent.rs @@ -0,0 +1,214 @@ +use language::LineIndent; +use std::{cmp, iter}; + +#[derive(Copy, Clone, Debug)] +pub enum IndentDelta { + Spaces(isize), + Tabs(isize), +} + +impl IndentDelta { + pub fn character(&self) -> char { + match self { + IndentDelta::Spaces(_) => ' ', + IndentDelta::Tabs(_) => '\t', + } + } + + pub fn len(&self) -> isize { + match self { + IndentDelta::Spaces(n) => *n, + IndentDelta::Tabs(n) => *n, + } + } +} + +pub fn compute_indent_delta(buffer_indent: LineIndent, query_indent: LineIndent) -> IndentDelta { + if buffer_indent.tabs > 0 { + IndentDelta::Tabs(buffer_indent.tabs as isize - query_indent.tabs as isize) + } else { + IndentDelta::Spaces(buffer_indent.spaces as isize - query_indent.spaces as isize) + } +} + +/// Synchronous re-indentation adapter. Buffers incomplete lines and applies +/// an `IndentDelta` to each line's leading whitespace before emitting it. 
+pub struct Reindenter { + delta: IndentDelta, + buffer: String, + in_leading_whitespace: bool, +} + +impl Reindenter { + pub fn new(delta: IndentDelta) -> Self { + Self { + delta, + buffer: String::new(), + in_leading_whitespace: true, + } + } + + /// Feed a chunk of text and return the re-indented portion that is + /// ready to emit. Incomplete trailing lines are buffered internally. + pub fn push(&mut self, chunk: &str) -> String { + self.buffer.push_str(chunk); + self.drain(false) + } + + /// Flush any remaining buffered content (call when the stream is done). + pub fn finish(&mut self) -> String { + self.drain(true) + } + + fn drain(&mut self, is_final: bool) -> String { + let mut indented = String::new(); + let mut start_ix = 0; + let mut newlines = self.buffer.match_indices('\n'); + loop { + let (line_end, is_pending_line) = match newlines.next() { + Some((ix, _)) => (ix, false), + None => (self.buffer.len(), true), + }; + let line = &self.buffer[start_ix..line_end]; + + if self.in_leading_whitespace { + if let Some(non_whitespace_ix) = line.find(|c| self.delta.character() != c) { + // We found a non-whitespace character, adjust indentation + // based on the delta. + let new_indent_len = + cmp::max(0, non_whitespace_ix as isize + self.delta.len()) as usize; + indented.extend(iter::repeat(self.delta.character()).take(new_indent_len)); + indented.push_str(&line[non_whitespace_ix..]); + self.in_leading_whitespace = false; + } else if is_pending_line && !is_final { + // We're still in leading whitespace and this line is incomplete. + // Stop processing until we receive more input. + break; + } else { + // This line is entirely whitespace. Push it without indentation. 
+ indented.push_str(line); + } + } else { + indented.push_str(line); + } + + if is_pending_line { + start_ix = line_end; + break; + } else { + self.in_leading_whitespace = true; + indented.push('\n'); + start_ix = line_end + 1; + } + } + self.buffer.replace_range(..start_ix, ""); + if is_final { + indented.push_str(&self.buffer); + self.buffer.clear(); + } + indented + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_indent_single_chunk() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" abc\n def\n ghi"); + // All three lines are emitted: "ghi" starts with spaces but + // contains non-whitespace, so it's processed immediately. + assert_eq!(out, " abc\n def\n ghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_outdent_tabs() { + let mut r = Reindenter::new(IndentDelta::Tabs(-2)); + let out = r.push("\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi"); + assert_eq!(out, "\t\tabc\ndef\n\t\t\t\tghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_incremental_chunks() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + // Feed " ab" — the `a` is non-whitespace, so the line is + // processed immediately even without a trailing newline. + let out = r.push(" ab"); + assert_eq!(out, " ab"); + // Feed "c\n" — appended to the already-processed line (no longer + // in leading whitespace). + let out = r.push("c\n"); + assert_eq!(out, "c\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_zero_delta() { + let mut r = Reindenter::new(IndentDelta::Spaces(0)); + let out = r.push(" hello\n world\n"); + assert_eq!(out, " hello\n world\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_clamp_negative_indent() { + let mut r = Reindenter::new(IndentDelta::Spaces(-10)); + let out = r.push(" abc\n"); + // max(0, 2 - 10) = 0, so no leading spaces. 
+ assert_eq!(out, "abc\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_whitespace_only_lines() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" \n code\n"); + // First line is all whitespace — emitted verbatim. Second line is indented. + assert_eq!(out, " \n code\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_compute_indent_delta_spaces() { + let buffer = LineIndent { + tabs: 0, + spaces: 8, + line_blank: false, + }; + let query = LineIndent { + tabs: 0, + spaces: 4, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), 4); + assert_eq!(delta.character(), ' '); + } + + #[test] + fn test_compute_indent_delta_tabs() { + let buffer = LineIndent { + tabs: 2, + spaces: 0, + line_blank: false, + }; + let query = LineIndent { + tabs: 3, + spaces: 0, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), -1); + assert_eq!(delta.character(), '\t'); + } +} diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index 4d8bdaf698cb6bc50f6080c9b029954242a56f14..18c41670ac4b4ba3146fb207992a7020a44fbd5f 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -1,4 +1,4 @@ -use std::{any::Any, path::Path, rc::Rc, sync::Arc}; +use std::{any::Any, rc::Rc, sync::Arc}; use agent_client_protocol as acp; use agent_servers::{AgentServer, AgentServerDelegate}; @@ -35,19 +35,10 @@ impl AgentServer for NativeAgentServer { fn connect( &self, - _root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, - ) -> Task< - Result<( - Rc, - Option, - )>, - > { - log::debug!( - "NativeAgentServer::connect called for path: {:?}", - _root_dir - ); + ) -> Task>> { + log::debug!("NativeAgentServer::connect"); let project = delegate.project().clone(); let fs = self.fs.clone(); let thread_store = self.thread_store.clone(); @@ 
-66,10 +57,7 @@ impl AgentServer for NativeAgentServer { let connection = NativeAgentConnection(agent); log::debug!("NativeAgentServer connection established successfully"); - Ok(( - Rc::new(connection) as Rc, - None, - )) + Ok(Rc::new(connection) as Rc) }) } diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index 069bf0349299e6f4952f673cbf7607e52d48d9c5..3beb5cb0d51abc55fbf3cf0849ced248a9d1fa5c 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -50,9 +50,9 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { // Add just the tools we need for this test let language_registry = project.read(cx).languages().clone(); thread.add_tool(crate::ReadFileTool::new( - cx.weak_entity(), project.clone(), thread.action_log().clone(), + true, )); thread.add_tool(crate::EditFileTool::new( project.clone(), diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 139242fdee9da968986b3fc9537bf9e5292b7dc5..0993b43a13ced62000692bf2b0b35d3ab7fb68e7 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -159,7 +159,7 @@ impl crate::TerminalHandle for FakeTerminalHandle { struct FakeSubagentHandle { session_id: acp::SessionId, - wait_for_summary_task: Shared>, + send_task: Shared>, } impl SubagentHandle for FakeSubagentHandle { @@ -167,8 +167,12 @@ impl SubagentHandle for FakeSubagentHandle { self.session_id.clone() } - fn wait_for_output(&self, cx: &AsyncApp) -> Task> { - let task = self.wait_for_summary_task.clone(); + fn num_entries(&self, _cx: &App) -> usize { + unimplemented!() + } + + fn send(&self, _message: String, cx: &AsyncApp) -> Task> { + let task = self.send_task.clone(); cx.background_spawn(async move { Ok(task.await) }) } } @@ -203,13 +207,7 @@ impl crate::ThreadEnvironment for FakeThreadEnvironment { Task::ready(Ok(handle as Rc)) } - fn create_subagent( - &self, - 
_parent_thread: Entity, - _label: String, - _initial_prompt: String, - _cx: &mut App, - ) -> Result> { + fn create_subagent(&self, _label: String, _cx: &mut App) -> Result> { Ok(self .subagent_handle .clone() @@ -248,13 +246,7 @@ impl crate::ThreadEnvironment for MultiTerminalEnvironment { Task::ready(Ok(handle as Rc)) } - fn create_subagent( - &self, - _parent_thread: Entity, - _label: String, - _initial_prompt: String, - _cx: &mut App, - ) -> Result> { + fn create_subagent(&self, _label: String, _cx: &mut App) -> Result> { unimplemented!() } } @@ -285,8 +277,17 @@ async fn test_echo(cx: &mut TestAppContext) { let events = events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); - assert_eq!(thread.last_message().unwrap().to_markdown(), "Hello\n") + assert_eq!( + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + .last_received_or_pending_message() + .unwrap() + .to_markdown(), + "Hello\n" + ) }); assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } @@ -310,11 +311,11 @@ async fn test_terminal_tool_timeout_kills_handle(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sleep 1000".to_string(), cd: ".".to_string(), timeout_ms: Some(5), - }, + }), event_stream, cx, ) @@ -377,11 +378,11 @@ async fn test_terminal_tool_without_timeout_does_not_kill_handle(cx: &mut TestAp let _task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sleep 1000".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -438,9 +439,15 @@ async fn test_thinking(cx: &mut TestAppContext) { let events = events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); assert_eq!( - 
thread.last_message().unwrap().to_markdown(), + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + .last_received_or_pending_message() + .unwrap() + .to_markdown(), indoc! {" Think Hello @@ -718,7 +725,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { thread.update(cx, |thread, _cx| { assert!( thread - .last_message() + .last_received_or_pending_message() .unwrap() .as_agent_message() .unwrap() @@ -755,7 +762,7 @@ async fn test_streaming_tool_calls(cx: &mut TestAppContext) { if let Ok(ThreadEvent::ToolCall(tool_call)) = event { thread.update(cx, |thread, _cx| { // Look for a tool use in the thread's last message - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let last_content = agent_message.content.last().unwrap(); if let AgentMessageContent::ToolUse(last_tool_use) = last_content { @@ -1225,7 +1232,7 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { assert_eq!(stop_reasons, vec![acp::StopReason::EndTurn]); thread.update(cx, |thread, _cx| { - let last_message = thread.last_message().unwrap(); + let last_message = thread.last_received_or_pending_message().unwrap(); let agent_message = last_message.as_agent_message().unwrap(); let text = agent_message .content @@ -1931,7 +1938,7 @@ async fn test_cancellation(cx: &mut TestAppContext) { .collect::>() .await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -2000,7 +2007,7 @@ async fn test_terminal_tool_cancellation_captures_output(cx: &mut TestAppContext // Verify the tool result contains the terminal output, not just "Tool canceled by user" thread.update(cx, |thread, _cx| { - let message = 
thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2156,7 +2163,7 @@ async fn verify_thread_recovery( let events = events.collect::>().await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -2465,7 +2472,7 @@ async fn test_terminal_tool_stopped_via_terminal_card_button(cx: &mut TestAppCon // Verify the tool result indicates user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2560,7 +2567,7 @@ async fn test_terminal_tool_timeout_expires(cx: &mut TestAppContext) { // Verify the tool result indicates timeout, not user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2624,6 +2631,84 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); } +#[gpui::test] +async fn test_retry_cancelled_promptly_on_new_send(cx: &mut TestAppContext) { + // Regression test: when a completion fails with a retryable error (e.g. upstream 500), + // the retry loop waits on a timer. If the user switches models and sends a new message + // during that delay, the old turn should exit immediately instead of retrying with the + // stale model. + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let model_a = model.as_fake(); + + // Start a turn with model_a. 
+ let events_1 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello"], cx) + }) + .unwrap(); + cx.run_until_parked(); + assert_eq!(model_a.completion_count(), 1); + + // Model returns a retryable upstream 500. The turn enters the retry delay. + model_a.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + model_a.end_last_completion_stream(); + cx.run_until_parked(); + + // The old completion was consumed; model_a has no pending requests yet because the + // retry timer hasn't fired. + assert_eq!(model_a.completion_count(), 0); + + // Switch to model_b and send a new message. This cancels the old turn. + let model_b = Arc::new(FakeLanguageModel::with_id_and_thinking( + "fake", "model-b", "Model B", false, + )); + thread.update(cx, |thread, cx| { + thread.set_model(model_b.clone(), cx); + }); + let events_2 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Continue"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // model_b should have received its completion request. + assert_eq!(model_b.as_fake().completion_count(), 1); + + // Advance the clock well past the retry delay (BASE_RETRY_DELAY = 5s). + cx.executor().advance_clock(Duration::from_secs(10)); + cx.run_until_parked(); + + // model_a must NOT have received another completion request — the cancelled turn + // should have exited during the retry delay rather than retrying with the old model. + assert_eq!( + model_a.completion_count(), + 0, + "old model should not receive a retry request after cancellation" + ); + + // Complete model_b's turn. 
+ model_b + .as_fake() + .send_last_completion_stream_text_chunk("Done!"); + model_b + .as_fake() + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + model_b.as_fake().end_last_completion_stream(); + + let events_1 = events_1.collect::>().await; + assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]); + + let events_2 = events_2.collect::>().await; + assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); +} + #[gpui::test] async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; @@ -3456,7 +3541,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { events.collect::>().await; thread.read_with(cx, |thread, _cx| { assert_eq!( - thread.last_message(), + thread.last_received_or_pending_message(), Some(Message::Agent(AgentMessage { content: vec![AgentMessageContent::Text("Done".into())], tool_results: IndexMap::default(), @@ -3520,6 +3605,113 @@ async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) { )); } +#[gpui::test] +async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( + cx: &mut TestAppContext, +) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingEchoTool); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Use the streaming_echo tool"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Send a partial tool use (is_input_complete = false), simulating the LLM + // streaming input for a tool. 
+ let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: "streaming_echo".into(), + raw_input: r#"{"text": "partial"}"#.into(), + input: json!({"text": "partial"}), + is_input_complete: false, + thought_signature: None, + }; + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone())); + cx.run_until_parked(); + + // Send a stream error WITHOUT ever sending is_input_complete = true. + // Before the fix, this would deadlock: the tool waits for more partials + // (or cancellation), run_turn_internal waits for the tool, and the sender + // keeping the channel open lives inside RunningTurn. + fake_model.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + fake_model.end_last_completion_stream(); + + // Advance past the retry delay so run_turn_internal retries. + cx.executor().advance_clock(Duration::from_secs(5)); + cx.run_until_parked(); + + // The retry request should contain the streaming tool's error result, + // proving the tool terminated and its result was forwarded. 
+ let completion = fake_model + .pending_completions() + .pop() + .expect("No running turn"); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Use the streaming_echo tool".into()], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use.clone())], + cache: false, + reasoning_details: None, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use.id.clone(), + tool_name: tool_use.name, + is_error: true, + content: "Failed to receive tool input: tool input was not fully received" + .into(), + output: Some( + "Failed to receive tool input: tool input was not fully received" + .into() + ), + } + )], + cache: true, + reasoning_details: None, + }, + ] + ); + + // Finish the retry round so the turn completes cleanly. 
+ fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _cx| { + assert!( + thread.is_turn_complete(), + "Thread should not be stuck; the turn should have completed", + ); + }); +} + /// Filters out the stop events for asserting against in tests fn stop_events(result_events: Vec>) -> Vec { result_events @@ -3575,6 +3767,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { ToolRequiringPermission::NAME: true, InfiniteTool::NAME: true, CancellationAwareTool::NAME: true, + StreamingEchoTool::NAME: true, (TerminalTool::NAME): true, } } @@ -3991,11 +4184,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "rm -rf /".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4043,11 +4236,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "echo hello".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4101,11 +4294,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let _task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sudo rm file".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4148,11 +4341,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "echo hello".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4306,6 +4499,160 @@ async fn 
test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { subagent task response + ## Assistant + + Response + + "#}, + ); +} + +#[gpui::test] +async fn test_subagent_tool_output_does_not_include_thinking(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + LanguageModelRegistry::test(cx); + }); + cx.update(|cx| { + cx.update_flags(true, vec!["subagents".to_string()]); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "b.md": "Lorem" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = NativeAgent::new( + project.clone(), + thread_store.clone(), + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), Path::new(""), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + let model = Arc::new(FakeLanguageModel::default()); + + // Ensure empty threads are not saved, even if they get mutated. 
+ thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + cx.run_until_parked(); + + let send = acp_thread.update(cx, |thread, cx| thread.send_raw("Prompt", cx)); + cx.run_until_parked(); + model.send_last_completion_stream_text_chunk("spawning subagent"); + let subagent_tool_input = SpawnAgentToolInput { + label: "label".to_string(), + message: "subagent task prompt".to_string(), + session_id: None, + }; + let subagent_tool_use = LanguageModelToolUse { + id: "subagent_1".into(), + name: SpawnAgentTool::NAME.into(), + raw_input: serde_json::to_string(&subagent_tool_input).unwrap(), + input: serde_json::to_value(&subagent_tool_input).unwrap(), + is_input_complete: true, + thought_signature: None, + }; + model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + subagent_tool_use, + )); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + let subagent_session_id = thread.read_with(cx, |thread, cx| { + thread + .running_subagent_ids(cx) + .get(0) + .expect("subagent thread should be running") + .clone() + }); + + let subagent_thread = agent.read_with(cx, |agent, _cx| { + agent + .sessions + .get(&subagent_session_id) + .expect("subagent session should exist") + .acp_thread + .clone() + }); + + model.send_last_completion_stream_text_chunk("subagent task response 1"); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking { + text: "thinking more about the subagent task".into(), + signature: None, + }); + model.send_last_completion_stream_text_chunk("subagent task response 2"); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + assert_eq!( + subagent_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)), + indoc! 
{" + ## User + + subagent task prompt + + ## Assistant + + subagent task response 1 + + + thinking more about the subagent task + + + subagent task response 2 + + "} + ); + + model.send_last_completion_stream_text_chunk("Response"); + model.end_last_completion_stream(); + + send.await.unwrap(); + + assert_eq!( + acp_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)), + indoc! {r#" + ## User + + Prompt + + ## Assistant + + spawning subagent + + **Tool Call: label** + Status: Completed + + subagent task response 1 + + subagent task response 2 ## Assistant @@ -5309,11 +5656,11 @@ async fn test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit sensitive file".to_string(), path: "root/sensitive_config.txt".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5359,9 +5706,9 @@ async fn test_delete_path_tool_deny_rule_blocks_deletion(cx: &mut TestAppContext let task = cx.update(|cx| { tool.run( - crate::DeletePathToolInput { + ToolInput::resolved(crate::DeletePathToolInput { path: "root/important_data.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5411,10 +5758,10 @@ async fn test_move_path_tool_denies_if_destination_denied(cx: &mut TestAppContex let task = cx.update(|cx| { tool.run( - crate::MovePathToolInput { + ToolInput::resolved(crate::MovePathToolInput { source_path: "root/safe.txt".to_string(), destination_path: "root/protected/safe.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5467,10 +5814,10 @@ async fn test_move_path_tool_denies_if_source_denied(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::MovePathToolInput { + ToolInput::resolved(crate::MovePathToolInput { source_path: "root/secret.txt".to_string(), destination_path: "root/public/not_secret.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5525,10 +5872,10 @@ async fn 
test_copy_path_tool_deny_rule_blocks_copy(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::CopyPathToolInput { + ToolInput::resolved(crate::CopyPathToolInput { source_path: "root/confidential.txt".to_string(), destination_path: "root/dest/copy.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5580,12 +5927,12 @@ async fn test_save_file_tool_denies_if_any_path_denied(cx: &mut TestAppContext) let task = cx.update(|cx| { tool.run( - crate::SaveFileToolInput { + ToolInput::resolved(crate::SaveFileToolInput { paths: vec![ std::path::PathBuf::from("root/normal.txt"), std::path::PathBuf::from("root/readonly/config.txt"), ], - }, + }), event_stream, cx, ) @@ -5632,9 +5979,9 @@ async fn test_save_file_tool_respects_deny_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::SaveFileToolInput { + ToolInput::resolved(crate::SaveFileToolInput { paths: vec![std::path::PathBuf::from("root/config.secret")], - }, + }), event_stream, cx, ) @@ -5676,7 +6023,7 @@ async fn test_web_search_tool_deny_rule_blocks_search(cx: &mut TestAppContext) { let input: crate::WebSearchToolInput = serde_json::from_value(json!({"query": "internal.company.com secrets"})).unwrap(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let result = task.await; assert!(result.is_err(), "expected search to be blocked"); @@ -5741,11 +6088,11 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte let _task = cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit README".to_string(), path: "root/README.md".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5811,11 +6158,11 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes let (event_stream, mut rx) = crate::ToolCallEventStream::test(); let _task = 
cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit local settings".to_string(), path: "root/.zed/settings.json".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5855,7 +6202,7 @@ async fn test_fetch_tool_deny_rule_blocks_url(cx: &mut TestAppContext) { let input: crate::FetchToolInput = serde_json::from_value(json!({"url": "https://internal.company.com/api"})).unwrap(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let result = task.await; assert!(result.is_err(), "expected fetch to be blocked"); @@ -5893,7 +6240,7 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext) let input: crate::FetchToolInput = serde_json::from_value(json!({"url": "https://docs.rs/some-crate"})).unwrap(); - let _task = cx.update(|cx| tool.run(input, event_stream, cx)); + let _task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); cx.run_until_parked(); diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index 0ed2eef90271538c575cc84b56a28df106e4bd41..ac179c590a93824813afa338d9deed16b4d00ebd 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -3,6 +3,57 @@ use agent_settings::AgentSettings; use gpui::{App, SharedString, Task}; use std::future; use std::sync::atomic::{AtomicBool, Ordering}; +use std::time::Duration; + +/// A streaming tool that echoes its input, used to test streaming tool +/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends +/// before `is_input_complete`). +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingEchoToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingEchoTool; + +impl AgentTool for StreamingEchoTool { + type Input = StreamingEchoToolInput; + type Output = String; + + const NAME: &'static str = "streaming_echo"; + + fn supports_input_streaming() -> bool { + true + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "Streaming Echo".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + while input.recv_partial().await.is_some() {} + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + Ok(input.text) + }) + } +} /// A tool that echoes its input #[derive(JsonSchema, Serialize, Deserialize)] @@ -33,11 +84,17 @@ impl AgentTool for EchoTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - Task::ready(Ok(input.text)) + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + Ok(input.text) + }) } } @@ -74,7 +131,7 @@ impl AgentTool for DelayTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> @@ -83,6 +140,10 @@ impl AgentTool for DelayTool { { let executor = cx.background_executor().clone(); cx.foreground_executor().spawn(async move { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; executor.timer(Duration::from_millis(input.ms)).await; Ok("Ding".to_string()) }) @@ -114,28 +175,38 @@ impl AgentTool for ToolRequiringPermission { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = 
decide_permission_from_settings(Self::NAME, &[String::new()], settings); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = crate::ToolPermissionContext::new( - "tool_requiring_permission", - vec![String::new()], - ); - Some(event_stream.authorize("Authorize?", context, cx)) - } - }; + cx.spawn(async move |cx| { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let decision = cx.update(|cx| { + decide_permission_from_settings( + Self::NAME, + &[String::new()], + AgentSettings::get_global(cx), + ) + }); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => Some(cx.update(|cx| { + let context = crate::ToolPermissionContext::new( + "tool_requiring_permission", + vec![String::new()], + ); + event_stream.authorize("Authorize?", context, cx) + })), + }; - cx.foreground_executor().spawn(async move { if let Some(authorize) = authorize { authorize.await.map_err(|e| e.to_string())?; } @@ -169,11 +240,15 @@ impl AgentTool for InfiniteTool { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; future::pending::<()>().await; unreachable!() }) @@ -221,11 +296,15 @@ impl AgentTool for CancellationAwareTool { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; // Wait for cancellation - this tool does 
nothing but wait to be cancelled event_stream.cancelled_by_user().await; self.was_cancelled.store(true, Ordering::SeqCst); @@ -276,10 +355,16 @@ impl AgentTool for WordListTool { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - Task::ready(Ok("ok".to_string())) + cx.spawn(async move |_cx| { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + Ok("ok".to_string()) + }) } } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 5d4de36cb69335de7a77eb7ad7a15f75b8e2b0b7..73102929ac58caaf96b06e6ab74ded698cbe86e3 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1,16 +1,14 @@ use crate::{ - AgentGitWorktreeInfo, ContextServerRegistry, CopyPathTool, CreateDirectoryTool, - DbLanguageModel, DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, - FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, - ReadFileTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, + ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, + DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, + ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, + RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool, decide_permission_from_settings, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{ - FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, SubagentsFeatureFlag, -}; +use feature_flags::{FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag}; use agent_client_protocol as acp; use agent_settings::{ @@ -40,16 +38,19 @@ use language_model::{ LanguageModelImage, 
LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID, + LanguageModelToolUseId, Role, SelectedModel, Speed, StopReason, TokenUsage, + ZED_CLOUD_PROVIDER_ID, }; use project::Project; use prompt_store::ProjectContext; use schemars::{JsonSchema, Schema}; +use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file}; use smol::stream::StreamExt; use std::{ collections::BTreeMap, + marker::PhantomData, ops::RangeInclusive, path::Path, rc::Rc, @@ -602,8 +603,13 @@ pub trait TerminalHandle { } pub trait SubagentHandle { + /// The session ID of this subagent thread fn id(&self) -> acp::SessionId; - fn wait_for_output(&self, cx: &AsyncApp) -> Task>; + /// The current number of entries in the thread. + /// Useful for knowing where the next turn will begin + fn num_entries(&self, cx: &App) -> usize; + /// Runs a turn for a given message and returns both the response and the index of that output message. 
+ fn send(&self, message: String, cx: &AsyncApp) -> Task>; } pub trait ThreadEnvironment { @@ -615,19 +621,11 @@ pub trait ThreadEnvironment { cx: &mut AsyncApp, ) -> Task>>; - fn create_subagent( - &self, - parent_thread: Entity, - label: String, - initial_prompt: String, - cx: &mut App, - ) -> Result>; + fn create_subagent(&self, label: String, cx: &mut App) -> Result>; fn resume_subagent( &self, - _parent_thread: Entity, _session_id: acp::SessionId, - _follow_up_prompt: String, _cx: &mut App, ) -> Result> { Err(anyhow::anyhow!( @@ -890,20 +888,20 @@ pub struct Thread { summarization_model: Option>, thinking_enabled: bool, thinking_effort: Option, + speed: Option, prompt_capabilities_tx: watch::Sender, pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, pub(crate) action_log: Entity, - /// Tracks the last time files were read by the agent, to detect external modifications - pub(crate) file_read_times: HashMap, /// True if this thread was imported from a shared thread and can be synced. imported: bool, /// If this is a subagent thread, contains context about the parent subagent_context: Option, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. + draft_prompt: Option>, + ui_scroll_position: Option, /// Weak references to running subagent threads for cancellation propagation running_subagents: Vec>, - /// Git worktree info if this thread is running in an agent worktree. 
- git_worktree_info: Option, } impl Thread { @@ -920,12 +918,16 @@ impl Thread { let context_server_registry = parent_thread.read(cx).context_server_registry.clone(); let templates = parent_thread.read(cx).templates.clone(); let model = parent_thread.read(cx).model().cloned(); - let mut thread = Self::new( + let parent_action_log = parent_thread.read(cx).action_log().clone(); + let action_log = + cx.new(|_cx| ActionLog::new(project.clone()).with_linked_action_log(parent_action_log)); + let mut thread = Self::new_internal( project, project_context, context_server_registry, templates, model, + action_log, cx, ); thread.subagent_context = Some(SubagentContext { @@ -942,6 +944,26 @@ impl Thread { templates: Arc, model: Option>, cx: &mut Context, + ) -> Self { + Self::new_internal( + project.clone(), + project_context, + context_server_registry, + templates, + model, + cx.new(|_cx| ActionLog::new(project)), + cx, + ) + } + + fn new_internal( + project: Entity, + project_context: Entity, + context_server_registry: Entity, + templates: Arc, + model: Option>, + action_log: Entity, + cx: &mut Context, ) -> Self { let settings = AgentSettings::get_global(cx); let profile_id = settings.default_profile.clone(); @@ -953,7 +975,6 @@ impl Thread { .default_model .as_ref() .and_then(|model| model.effort.clone()); - let action_log = cx.new(|_cx| ActionLog::new(project.clone())); let (prompt_capabilities_tx, prompt_capabilities_rx) = watch::channel(Self::prompt_capabilities(model.as_deref())); Self { @@ -985,16 +1006,17 @@ impl Thread { model, summarization_model: None, thinking_enabled: enable_thinking, + speed: None, thinking_effort, prompt_capabilities_tx, prompt_capabilities_rx, project, action_log, - file_read_times: HashMap::default(), imported: false, subagent_context: None, + draft_prompt: None, + ui_scroll_position: None, running_subagents: Vec::new(), - git_worktree_info: None, } } @@ -1143,10 +1165,6 @@ impl Thread { let profile_id = db_thread .profile .unwrap_or_else(|| 
settings.default_profile.clone()); - let thinking_effort = settings - .default_model - .as_ref() - .and_then(|model| model.effort.clone()); let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { db_thread @@ -1175,12 +1193,6 @@ impl Thread { watch::channel(Self::prompt_capabilities(model.as_deref())); let action_log = cx.new(|_| ActionLog::new(project.clone())); - // TODO: We should serialize the user's configured thinking parameter on `DbThread` - // rather than deriving it from the model's capability. A user may have explicitly - // toggled thinking off for a model that supports it, and we'd lose that preference here. - let enable_thinking = model - .as_deref() - .is_some_and(|model| model.supports_thinking()); Self { id, @@ -1208,18 +1220,22 @@ impl Thread { templates, model, summarization_model: None, - thinking_enabled: enable_thinking, - thinking_effort, + thinking_enabled: db_thread.thinking_enabled, + thinking_effort: db_thread.thinking_effort, + speed: db_thread.speed, project, action_log, updated_at: db_thread.updated_at, prompt_capabilities_tx, prompt_capabilities_rx, - file_read_times: HashMap::default(), imported: db_thread.imported, subagent_context: db_thread.subagent_context, + draft_prompt: db_thread.draft_prompt, + ui_scroll_position: db_thread.ui_scroll_position.map(|sp| gpui::ListOffset { + item_ix: sp.item_ix, + offset_in_item: gpui::px(sp.offset_in_item), + }), running_subagents: Vec::new(), - git_worktree_info: db_thread.git_worktree_info, } } @@ -1240,7 +1256,16 @@ impl Thread { profile: Some(self.profile_id.clone()), imported: self.imported, subagent_context: self.subagent_context.clone(), - git_worktree_info: self.git_worktree_info.clone(), + speed: self.speed, + thinking_enabled: self.thinking_enabled, + thinking_effort: self.thinking_effort.clone(), + draft_prompt: self.draft_prompt.clone(), + ui_scroll_position: self.ui_scroll_position.map(|lo| { + crate::db::SerializedScrollPosition { + item_ix: lo.item_ix, + 
offset_in_item: lo.offset_in_item.as_f32(), + } + }), }; cx.background_spawn(async move { @@ -1282,19 +1307,42 @@ impl Thread { self.messages.is_empty() && self.title.is_none() } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn model(&self) -> Option<&Arc> { self.model.as_ref() } pub fn set_model(&mut self, model: Arc, cx: &mut Context) { let old_usage = self.latest_token_usage(); - self.model = Some(model); + self.model = Some(model.clone()); let new_caps = Self::prompt_capabilities(self.model.as_deref()); let new_usage = self.latest_token_usage(); if old_usage != new_usage { cx.emit(TokenUsageUpdated(new_usage)); } self.prompt_capabilities_tx.send(new_caps).log_err(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_model(model.clone(), cx)) + .ok(); + } + cx.notify() } @@ -1307,7 +1355,15 @@ impl Thread { model: Option>, cx: &mut Context, ) { - self.summarization_model = model; + self.summarization_model = model.clone(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_summarization_model(model.clone(), cx) + }) + .ok(); + } cx.notify() } @@ -1317,6 +1373,12 @@ impl Thread { pub fn set_thinking_enabled(&mut self, enabled: bool, cx: &mut Context) { self.thinking_enabled = enabled; + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_thinking_enabled(enabled, cx)) + .ok(); + } cx.notify(); } @@ -1325,11 +1387,39 @@ impl Thread { } pub fn set_thinking_effort(&mut self, effort: Option, cx: &mut Context) { - self.thinking_effort = effort; + self.thinking_effort = effort.clone(); + + for subagent in 
&self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_thinking_effort(effort.clone(), cx) + }) + .ok(); + } cx.notify(); } - pub fn last_message(&self) -> Option { + pub fn speed(&self) -> Option { + self.speed + } + + pub fn set_speed(&mut self, speed: Speed, cx: &mut Context) { + self.speed = Some(speed); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_speed(speed, cx)) + .ok(); + } + cx.notify(); + } + + pub fn last_message(&self) -> Option<&Message> { + self.messages.last() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn last_received_or_pending_message(&self) -> Option { if let Some(message) = self.pending_message.clone() { Some(Message::Agent(message)) } else { @@ -1342,6 +1432,9 @@ impl Thread { environment: Rc, cx: &mut Context, ) { + // Only update the agent location for the root thread, not for subagents. + let update_agent_location = self.parent_thread_id().is_none(); + let language_registry = self.project.read(cx).languages().clone(); self.add_tool(CopyPathTool::new(self.project.clone())); self.add_tool(CreateDirectoryTool::new(self.project.clone())); @@ -1359,8 +1452,8 @@ impl Thread { self.add_tool(StreamingEditFileTool::new( self.project.clone(), cx.weak_entity(), + self.action_log.clone(), language_registry, - Templates::new(), )); self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); self.add_tool(FindPathTool::new(self.project.clone())); @@ -1370,17 +1463,17 @@ impl Thread { self.add_tool(NowTool); self.add_tool(OpenTool::new(self.project.clone())); self.add_tool(ReadFileTool::new( - cx.weak_entity(), self.project.clone(), self.action_log.clone(), + update_agent_location, )); self.add_tool(SaveFileTool::new(self.project.clone())); self.add_tool(RestoreFileFromDiskTool::new(self.project.clone())); self.add_tool(TerminalTool::new(self.project.clone(), environment.clone())); self.add_tool(WebSearchTool); - if cx.has_flag::() && self.depth() < 
MAX_SUBAGENT_DEPTH { - self.add_tool(SpawnAgentTool::new(cx.weak_entity(), environment)); + if self.depth() < MAX_SUBAGENT_DEPTH { + self.add_tool(SpawnAgentTool::new(environment)); } } @@ -1393,6 +1486,7 @@ impl Thread { self.tools.insert(T::NAME.into(), tool.erase()); } + #[cfg(any(test, feature = "test-support"))] pub fn remove_tool(&mut self, name: &str) -> bool { self.tools.remove(name).is_some() } @@ -1406,12 +1500,18 @@ impl Thread { return; } - self.profile_id = profile_id; + self.profile_id = profile_id.clone(); // Swap to the profile's preferred model when available. if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) { self.set_model(model, cx); } + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_profile(profile_id.clone(), cx)) + .ok(); + } } pub fn cancel(&mut self, cx: &mut Context) -> Task<()> { @@ -1664,6 +1764,7 @@ impl Thread { event_stream: event_stream.clone(), tools: self.enabled_tools(profile, &model, cx), cancellation_tx, + streaming_tool_inputs: HashMap::default(), _task: cx.spawn(async move |this, cx| { log::debug!("Starting agent turn execution"); @@ -1730,6 +1831,9 @@ impl Thread { telemetry::event!( "Agent Thread Completion", thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + parent_thread_id = this.read_with(cx, |this, _| this + .parent_thread_id() + .map(|id| id.to_string()))?, prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, model = model.telemetry_id(), model_provider = model.provider_id().to_string(), @@ -1814,6 +1918,19 @@ impl Thread { // that need their own permits. drop(events); + // Drop streaming tool input senders that never received their final input. + // This prevents deadlock when the LLM stream ends (e.g. because of an error) + // before sending a tool use with `is_input_complete: true`. 
+ this.update(cx, |this, _cx| { + if let Some(running_turn) = this.running_turn.as_mut() { + if running_turn.streaming_tool_inputs.is_empty() { + return; + } + log::warn!("Dropping partial tool inputs because the stream ended"); + running_turn.streaming_tool_inputs.drain(); + } + })?; + let end_turn = tool_results.is_empty(); while let Some(tool_result) = tool_results.next().await { log::debug!("Tool finished {:?}", tool_result); @@ -1856,7 +1973,15 @@ impl Thread { })??; let timer = cx.background_executor().timer(retry.duration); event_stream.send_retry(retry); - timer.await; + futures::select! { + _ = timer.fuse() => {} + _ = cancellation_rx.changed().fuse() => { + if *cancellation_rx.borrow() { + log::debug!("Turn cancelled during retry delay, exiting"); + return Ok(()); + } + } + } this.update(cx, |this, _cx| { if let Some(Message::Agent(message)) = this.messages.last() { if message.tool_results.is_empty() { @@ -1988,6 +2113,7 @@ impl Thread { telemetry::event!( "Agent Thread Completion Usage Updated", thread_id = self.id.to_string(), + parent_thread_id = self.parent_thread_id().map(|id| id.to_string()), prompt_id = self.prompt_id.to_string(), model = self.model.as_ref().map(|m| m.telemetry_id()), model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), @@ -2068,10 +2194,6 @@ impl Thread { self.send_or_update_tool_use(&tool_use, title, kind, event_stream); - if !tool_use.is_input_complete { - return None; - } - let Some(tool) = tool else { let content = format!("No tool named {} exists", tool_use.name); return Some(Task::ready(LanguageModelToolResult { @@ -2083,9 +2205,72 @@ impl Thread { })); }; + if !tool_use.is_input_complete { + if tool.supports_input_streaming() { + let running_turn = self.running_turn.as_mut()?; + if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) { + sender.send_partial(tool_use.input); + return None; + } + + let (sender, tool_input) = ToolInputSender::channel(); + 
sender.send_partial(tool_use.input); + running_turn + .streaming_tool_inputs + .insert(tool_use.id.clone(), sender); + + let tool = tool.clone(); + log::debug!("Running streaming tool {}", tool_use.name); + return Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )); + } else { + return None; + } + } + + if let Some(sender) = self + .running_turn + .as_mut()? + .streaming_tool_inputs + .remove(&tool_use.id) + { + sender.send_final(tool_use.input); + return None; + } + + log::debug!("Running tool {}", tool_use.name); + let tool_input = ToolInput::ready(tool_use.input); + Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )) + } + + fn run_tool( + &self, + tool: Arc, + tool_input: ToolInput, + tool_use_id: LanguageModelToolUseId, + tool_name: Arc, + event_stream: &ThreadEventStream, + cancellation_rx: watch::Receiver, + cx: &mut Context, + ) -> Task { let fs = self.project.read(cx).fs().clone(); let tool_event_stream = ToolCallEventStream::new( - tool_use.id.clone(), + tool_use_id.clone(), event_stream.clone(), Some(fs), cancellation_rx, @@ -2094,9 +2279,8 @@ impl Thread { acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress), ); let supports_images = self.model().is_some_and(|model| model.supports_images()); - let tool_result = tool.run(tool_use.input, tool_event_stream, cx); - log::debug!("Running tool {}", tool_use.name); - Some(cx.foreground_executor().spawn(async move { + let tool_result = tool.run(tool_input, tool_event_stream, cx); + cx.foreground_executor().spawn(async move { let (is_error, output) = match tool_result.await { Ok(mut output) => { if let LanguageModelToolResultContent::Image(_) = &output.llm_output @@ -2114,13 +2298,13 @@ impl Thread { }; LanguageModelToolResult { - tool_use_id: tool_use.id, - tool_name: tool_use.name, + tool_use_id, + tool_name, is_error, content: output.llm_output, output: 
Some(output.raw_output), } - })) + }) } fn handle_tool_use_json_parse_error_event( @@ -2165,20 +2349,18 @@ impl Thread { ) { // Ensure the last message ends in the current tool use let last_message = self.pending_message(); - let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { + + let has_tool_use = last_message.content.iter_mut().rev().any(|content| { if let AgentMessageContent::ToolUse(last_tool_use) = content { if last_tool_use.id == tool_use.id { *last_tool_use = tool_use.clone(); - false - } else { - true + return true; } - } else { - true } + false }); - if push_new_tool_use { + if !has_tool_use { event_stream.send_tool_call( &tool_use.id, &tool_use.name, @@ -2321,7 +2503,12 @@ impl Thread { anyhow::Ok(()) }; - if generate.await.context("failed to generate title").is_ok() { + if generate + .await + .context("failed to generate thread title") + .log_err() + .is_some() + { _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); } _ = this.update(cx, |this, _| this.pending_title_generation = None); @@ -2406,6 +2593,7 @@ impl Thread { name: tool_name.to_string(), description: tool.description().to_string(), input_schema: tool.input_schema(model.tool_input_format()).log_err()?, + use_input_streaming: tool.supports_input_streaming(), }) }) .collect::>() @@ -2437,6 +2625,7 @@ impl Thread { temperature: AgentSettings::temperature_for_model(model, cx), thinking_allowed: self.thinking_enabled, thinking_effort: self.thinking_effort.clone(), + speed: self.speed(), }; log::debug!("Completion request built successfully"); @@ -2459,7 +2648,8 @@ impl Thread { } } - let use_streaming_edit_tool = cx.has_flag::(); + let use_streaming_edit_tool = + cx.has_flag::() && model.supports_streaming_tools(); let mut tools = self .tools @@ -2776,6 +2966,9 @@ struct RunningTurn { /// Sender to signal tool cancellation. When cancel is called, this is /// set to true so all tools can detect user-initiated cancellation. 
cancellation_tx: watch::Sender, + /// Senders for tools that support input streaming and have already been + /// started but are still receiving input from the LLM. + streaming_tool_inputs: HashMap, } impl RunningTurn { @@ -2795,6 +2988,103 @@ pub struct TitleUpdated; impl EventEmitter for Thread {} +/// A channel-based wrapper that delivers tool input to a running tool. +/// +/// For non-streaming tools, created via `ToolInput::ready()` so `.recv()` resolves immediately. +/// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams +/// them, followed by the final complete input available through `.recv()`. +pub struct ToolInput { + partial_rx: mpsc::UnboundedReceiver, + final_rx: oneshot::Receiver, + _phantom: PhantomData, +} + +impl ToolInput { + #[cfg(any(test, feature = "test-support"))] + pub fn resolved(input: impl Serialize) -> Self { + let value = serde_json::to_value(input).expect("failed to serialize tool input"); + Self::ready(value) + } + + pub fn ready(value: serde_json::Value) -> Self { + let (partial_tx, partial_rx) = mpsc::unbounded(); + drop(partial_tx); + let (final_tx, final_rx) = oneshot::channel(); + final_tx.send(value).ok(); + Self { + partial_rx, + final_rx, + _phantom: PhantomData, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn test() -> (ToolInputSender, Self) { + let (sender, input) = ToolInputSender::channel(); + (sender, input.cast()) + } + + /// Wait for the final deserialized input, ignoring all partial updates. + /// Non-streaming tools can use this to wait until the whole input is available. + pub async fn recv(mut self) -> Result { + // Drain any remaining partials + while self.partial_rx.next().await.is_some() {} + let value = self + .final_rx + .await + .map_err(|_| anyhow!("tool input was not fully received"))?; + serde_json::from_value(value).map_err(Into::into) + } + + /// Returns the next partial JSON snapshot, or `None` when input is complete. 
+ /// Once this returns `None`, call `recv()` to get the final input. + pub async fn recv_partial(&mut self) -> Option { + self.partial_rx.next().await + } + + fn cast(self) -> ToolInput { + ToolInput { + partial_rx: self.partial_rx, + final_rx: self.final_rx, + _phantom: PhantomData, + } + } +} + +pub struct ToolInputSender { + partial_tx: mpsc::UnboundedSender, + final_tx: Option>, +} + +impl ToolInputSender { + pub(crate) fn channel() -> (Self, ToolInput) { + let (partial_tx, partial_rx) = mpsc::unbounded(); + let (final_tx, final_rx) = oneshot::channel(); + let sender = Self { + partial_tx, + final_tx: Some(final_tx), + }; + let input = ToolInput { + partial_rx, + final_rx, + _phantom: PhantomData, + }; + (sender, input) + } + + pub(crate) fn send_partial(&self, value: serde_json::Value) { + self.partial_tx.unbounded_send(value).ok(); + } + + pub(crate) fn send_final(mut self, value: serde_json::Value) { + // Close the partial channel so recv_partial() returns None + self.partial_tx.close_channel(); + if let Some(final_tx) = self.final_tx.take() { + final_tx.send(value).ok(); + } + } +} + pub trait AgentTool where Self: 'static + Sized, @@ -2828,6 +3118,11 @@ where language_model::tool_schema::root_schema_for::(format) } + /// Returns whether the tool supports streaming of tool use parameters. + fn supports_input_streaming() -> bool { + false + } + /// Some tools rely on a provider for the underlying billing or other reasons. /// Allow the tool to check if they are compatible, or should be filtered out. fn supports_provider(_provider: &LanguageModelProviderId) -> bool { @@ -2843,7 +3138,7 @@ where /// still signaling whether the invocation succeeded or failed. 
fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task>; @@ -2888,13 +3183,16 @@ pub trait AnyAgentTool { fn kind(&self) -> acp::ToolKind; fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString; fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result; + fn supports_input_streaming(&self) -> bool { + false + } fn supports_provider(&self, _provider: &LanguageModelProviderId) -> bool { true } /// See [`AgentTool::run`] for why this returns `Result`. fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task>; @@ -2923,6 +3221,10 @@ where T::kind() } + fn supports_input_streaming(&self) -> bool { + T::supports_input_streaming() + } + fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString { let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input); self.0.initial_title(parsed_input, _cx) @@ -2940,35 +3242,31 @@ where fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - cx.spawn(async move |cx| { - let input: T::Input = serde_json::from_value(input).map_err(|e| { - AgentToolOutput::from_error(format!("Failed to parse tool input: {e}")) - })?; - let task = cx.update(|cx| self.0.clone().run(input, event_stream, cx)); - match task.await { - Ok(output) => { - let raw_output = serde_json::to_value(&output).map_err(|e| { - AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}")) - })?; - Ok(AgentToolOutput { - llm_output: output.into(), - raw_output, - }) - } - Err(error_output) => { - let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| { - log::error!("Failed to serialize tool error output: {e}"); - serde_json::Value::Null - }); - Err(AgentToolOutput { - llm_output: error_output.into(), - raw_output, - }) - } + let tool_input: ToolInput = 
input.cast(); + let task = self.0.clone().run(tool_input, event_stream, cx); + cx.spawn(async move |_cx| match task.await { + Ok(output) => { + let raw_output = serde_json::to_value(&output).map_err(|e| { + AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}")) + })?; + Ok(AgentToolOutput { + llm_output: output.into(), + raw_output, + }) + } + Err(error_output) => { + let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| { + log::error!("Failed to serialize tool error output: {e}"); + serde_json::Value::Null + }); + Err(AgentToolOutput { + llm_output: error_output.into(), + raw_output, + }) } }) } @@ -3592,6 +3890,7 @@ mod tests { use super::*; use gpui::TestAppContext; use language_model::LanguageModelToolUseId; + use language_model::fake_provider::FakeLanguageModel; use serde_json::json; use std::sync::Arc; @@ -3629,6 +3928,181 @@ mod tests { }) } + fn setup_parent_with_subagents( + cx: &mut TestAppContext, + parent: &Entity, + count: usize, + ) -> Vec> { + cx.update(|cx| { + let mut subagents = Vec::new(); + for _ in 0..count { + let subagent = cx.new(|cx| Thread::new_subagent(parent, cx)); + parent.update(cx, |thread, _cx| { + thread.register_running_subagent(subagent.downgrade()); + }); + subagents.push(subagent); + } + subagents + }) + } + + #[gpui::test] + async fn test_set_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let new_model: Arc = Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "new-model", + "New Model", + false, + )); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_model(new_model, cx); + }); + + for subagent in &subagents { + let subagent_model_id = subagent.read(cx).model().unwrap().id(); + assert_eq!( + subagent_model_id.0.as_ref(), + "new-model", + "Subagent model should match parent model after set_model" + ); + } + }); 
+ } + + #[gpui::test] + async fn test_set_summarization_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let summary_model: Arc = + Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "summary-model", + "Summary Model", + false, + )); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_summarization_model(Some(summary_model), cx); + }); + + for subagent in &subagents { + let subagent_summary_id = subagent.read(cx).summarization_model().unwrap().id(); + assert_eq!( + subagent_summary_id.0.as_ref(), + "summary-model", + "Subagent summarization model should match parent after set_summarization_model" + ); + } + }); + } + + #[gpui::test] + async fn test_set_thinking_enabled_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + }); + + for subagent in &subagents { + assert!( + subagent.read(cx).thinking_enabled(), + "Subagent thinking should be enabled after parent enables it" + ); + } + + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(false, cx); + }); + + for subagent in &subagents { + assert!( + !subagent.read(cx).thinking_enabled(), + "Subagent thinking should be disabled after parent disables it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_thinking_effort_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_effort(Some("high".to_string()), cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).thinking_effort().map(|s| 
s.as_str()), + Some("high"), + "Subagent thinking effort should match parent" + ); + } + + parent.update(cx, |thread, cx| { + thread.set_thinking_effort(None, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).thinking_effort(), + None, + "Subagent thinking effort should be None after parent clears it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_speed_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_speed(Speed::Fast, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).speed(), + Some(Speed::Fast), + "Subagent speed should match parent after set_speed" + ); + } + }); + } + + #[gpui::test] + async fn test_dropped_subagent_does_not_panic(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 1); + + // Drop the subagent so the WeakEntity can no longer be upgraded + drop(subagents); + + // Should not panic even though the subagent was dropped + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + thread.set_speed(Speed::Fast, cx); + thread.set_thinking_effort(Some("high".to_string()), cx); + }); + }); + } + #[gpui::test] async fn test_handle_tool_use_json_parse_error_adds_tool_use_to_content( cx: &mut TestAppContext, diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 6add31fdb39302d3d02c250829dc14b0c10850af..961be1da4c09890691adbd5448d7678b2808fe7b 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -2,40 +2,12 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use gpui::{App, Context, Entity, Global, Task, prelude::*}; -use 
project::Project; -use std::rc::Rc; +use util::path_list::PathList; struct GlobalThreadStore(Entity); impl Global for GlobalThreadStore {} -// TODO: Remove once ACP thread loading is fully handled elsewhere. -pub fn load_agent_thread( - session_id: acp::SessionId, - thread_store: Entity, - project: Entity, - cx: &mut App, -) -> Task>> { - use agent_servers::{AgentServer, AgentServerDelegate}; - - let server = Rc::new(crate::NativeAgentServer::new( - project.read(cx).fs().clone(), - thread_store, - )); - let delegate = AgentServerDelegate::new( - project.read(cx).agent_server_store().clone(), - project.clone(), - None, - None, - ); - let connection = server.connect(None, delegate, cx); - cx.spawn(async move |cx| { - let (agent, _) = connection.await?; - let agent = agent.downcast::().unwrap(); - cx.update(|cx| agent.load_thread(session_id, cx)).await - }) -} - pub struct ThreadStore { threads: Vec, } @@ -50,6 +22,10 @@ impl ThreadStore { cx.global::().0.clone() } + pub fn try_global(cx: &App) -> Option> { + cx.try_global::().map(|g| g.0.clone()) + } + pub fn new(cx: &mut Context) -> Self { let this = Self { threads: Vec::new(), @@ -78,12 +54,13 @@ impl ThreadStore { &mut self, id: acp::SessionId, thread: crate::DbThread, + folder_paths: PathList, cx: &mut Context, ) -> Task> { let database_future = ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { let database = database_future.await.map_err(|err| anyhow!(err))?; - database.save_thread(id, thread).await?; + database.save_thread(id, thread, folder_paths).await?; this.update(cx, |this, cx| this.reload(cx)) }) } @@ -135,6 +112,13 @@ impl ThreadStore { pub fn entries(&self) -> impl Iterator + '_ { self.threads.iter().cloned() } + + /// Returns threads whose folder_paths match the given paths exactly. 
+ pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator { + self.threads + .iter() + .filter(move |thread| &thread.folder_paths == paths) + } } #[cfg(test)] @@ -162,7 +146,11 @@ mod tests { profile: None, imported: false, subagent_context: None, - git_worktree_info: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, } } @@ -184,12 +172,12 @@ mod tests { ); let save_older = thread_store.update(cx, |store, cx| { - store.save_thread(older_id.clone(), older_thread, cx) + store.save_thread(older_id.clone(), older_thread, PathList::default(), cx) }); save_older.await.unwrap(); let save_newer = thread_store.update(cx, |store, cx| { - store.save_thread(newer_id.clone(), newer_thread, cx) + store.save_thread(newer_id.clone(), newer_thread, PathList::default(), cx) }); save_newer.await.unwrap(); @@ -212,8 +200,9 @@ mod tests { Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), ); - let save_task = - thread_store.update(cx, |store, cx| store.save_thread(thread_id, thread, cx)); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread(thread_id, thread, PathList::default(), cx) + }); save_task.await.unwrap(); cx.run_until_parked(); @@ -244,11 +233,11 @@ mod tests { ); let save_first = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), first_thread, cx) + store.save_thread(first_id.clone(), first_thread, PathList::default(), cx) }); save_first.await.unwrap(); let save_second = thread_store.update(cx, |store, cx| { - store.save_thread(second_id.clone(), second_thread, cx) + store.save_thread(second_id.clone(), second_thread, PathList::default(), cx) }); save_second.await.unwrap(); cx.run_until_parked(); @@ -281,11 +270,11 @@ mod tests { ); let save_first = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), first_thread, cx) + store.save_thread(first_id.clone(), first_thread, PathList::default(), cx) }); 
save_first.await.unwrap(); let save_second = thread_store.update(cx, |store, cx| { - store.save_thread(second_id.clone(), second_thread, cx) + store.save_thread(second_id.clone(), second_thread, PathList::default(), cx) }); save_second.await.unwrap(); cx.run_until_parked(); @@ -295,7 +284,7 @@ mod tests { Utc.with_ymd_and_hms(2024, 1, 3, 0, 0, 0).unwrap(), ); let update_task = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), updated_first, cx) + store.save_thread(first_id.clone(), updated_first, PathList::default(), cx) }); update_task.await.unwrap(); cx.run_until_parked(); @@ -305,4 +294,50 @@ mod tests { assert_eq!(entries[0].id, first_id); assert_eq!(entries[1].id, second_id); } + + #[gpui::test] + async fn test_threads_for_paths_filters_correctly(cx: &mut TestAppContext) { + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + cx.run_until_parked(); + + let project_a_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-a")]); + let project_b_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-b")]); + + let thread_a = make_thread( + "Thread in A", + Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), + ); + let thread_b = make_thread( + "Thread in B", + Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap(), + ); + let thread_a_id = session_id("thread-a"); + let thread_b_id = session_id("thread-b"); + + let save_a = thread_store.update(cx, |store, cx| { + store.save_thread(thread_a_id.clone(), thread_a, project_a_paths.clone(), cx) + }); + save_a.await.unwrap(); + + let save_b = thread_store.update(cx, |store, cx| { + store.save_thread(thread_b_id.clone(), thread_b, project_b_paths.clone(), cx) + }); + save_b.await.unwrap(); + + cx.run_until_parked(); + + thread_store.read_with(cx, |store, _cx| { + let a_threads: Vec<_> = store.threads_for_paths(&project_a_paths).collect(); + assert_eq!(a_threads.len(), 1); + assert_eq!(a_threads[0].id, thread_a_id); + + let b_threads: Vec<_> = 
store.threads_for_paths(&project_b_paths).collect(); + assert_eq!(b_threads.len(), 1); + assert_eq!(b_threads[0].id, thread_b_id); + + let nonexistent = PathList::new(&[std::path::PathBuf::from("/nonexistent")]); + let no_threads: Vec<_> = store.threads_for_paths(&nonexistent).collect(); + assert!(no_threads.is_empty()); + }); + } } diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index 1962f237045c47935de90ebb231575da29d1205c..446472e0c459aa15fa57bb8b49178b08e6781d11 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -17,6 +17,7 @@ mod save_file_tool; mod spawn_agent_tool; mod streaming_edit_file_tool; mod terminal_tool; +mod tool_edit_parser; mod tool_permissions; mod web_search_tool; @@ -100,6 +101,7 @@ macro_rules! tools { name: T::NAME.to_string(), description: T::description().to_string(), input_schema: T::input_schema(LanguageModelToolSchemaFormat::JsonSchema).to_value(), + use_input_streaming: T::supports_input_streaming(), } } [ diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 694e28750cd69facc49b7a0bf862203a00043b4c..1c7590d8097a5de50b879d5b253c5dbabd3dcbab 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -1,4 +1,4 @@ -use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream}; +use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol::ToolKind; use anyhow::Result; use collections::{BTreeMap, HashMap}; @@ -329,7 +329,7 @@ impl AnyAgentTool for ContextServerTool { fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { @@ -339,14 +339,15 @@ impl AnyAgentTool for ContextServerTool { let tool_name = self.tool.name.clone(); let tool_id = mcp_tool_id(&self.server_id.0, &self.tool.name); let display_name = self.tool.name.clone(); - let authorize = 
event_stream.authorize_third_party_tool( - self.initial_title(input.clone(), cx), - tool_id, - display_name, - cx, - ); + let initial_title = self.initial_title(serde_json::Value::Null, cx); + let authorize = + event_stream.authorize_third_party_tool(initial_title, tool_id, display_name, cx); cx.spawn(async move |_cx| { + let input = input.recv().await.map_err(|e| { + AgentToolOutput::from_error(format!("Failed to receive tool input: {e}")) + })?; + authorize.await.map_err(|e| AgentToolOutput::from_error(e.to_string()))?; let Some(protocol) = server.client() else { diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index c82d9e930e1987d389ece84347c1a0f43c601182..7f53a5c36a7979a01de96535f19e421fa3119e16 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots, collect_symlink_escapes, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths, +}; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; use futures::FutureExt as _; @@ -79,19 +81,24 @@ impl AgentTool for CopyPathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let paths = vec![input.source_path.clone(), input.destination_path.clone()]; - let decision = decide_permission_for_paths(Self::NAME, &paths, settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; 
+ let paths = vec![input.source_path.clone(), input.destination_path.clone()]; + let decision = cx.update(|cx| { + decide_permission_for_paths(Self::NAME, &paths, &AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -248,7 +255,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -302,7 +309,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; drop(auth); @@ -354,7 +361,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -430,7 +437,9 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let result = cx.update(|cx| tool.run(input, event_stream, cx)).await; + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)) + .await; assert!(result.is_err(), "Tool should fail when policy denies"); assert!( diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 
500b5f00289db245898d5918a79dc684a6f0f110..5d8930f3c7400428d55cfe7d14bafc16d94be43a 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -13,7 +13,9 @@ use settings::Settings; use std::sync::Arc; use util::markdown::MarkdownInlineCode; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; use std::path::Path; /// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created. @@ -68,21 +70,26 @@ impl AgentTool for CreateDirectoryTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_for_path(Self::NAME, &input.path, settings); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &input.path, AgentSettings::get_global(cx)) + }); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } - let destination_path: Arc = input.path.as_str().into(); + let destination_path: Arc = input.path.as_str().into(); - let project = self.project.clone(); - cx.spawn(async move |cx| { let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -218,9 +225,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: 
"project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -277,9 +284,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -336,9 +343,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -415,9 +422,9 @@ mod tests { let result = cx .update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 048f4bd8292077874b49bd74b09cbea38b4fafc5..27ab68db667a4cf3223e6521682814dc1c245bb7 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_access, canonicalize_worktree_roots, detect_symlink_escape, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; use action_log::ActionLog; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; @@ -71,22 +73,27 @@ impl AgentTool for DeletePathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let path = input.path; - - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_for_path(Self::NAME, &path, settings); - - if let 
ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let path = input.path; + + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path, AgentSettings::get_global(cx)) + }); + + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -278,9 +285,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -345,9 +352,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -405,9 +412,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -488,9 +495,9 @@ mod tests { let result = cx .update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/diagnostics_tool.rs b/crates/agent/src/tools/diagnostics_tool.rs index fea16d531ed5f4212e6b1374aee04cee67b2fc0b..5889f66c2edbe06055678b19474447e0f23e2b0f 100644 --- a/crates/agent/src/tools/diagnostics_tool.rs +++ 
b/crates/agent/src/tools/diagnostics_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::Result; use futures::FutureExt as _; @@ -87,21 +87,27 @@ impl AgentTool for DiagnosticsTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - match input.path { - Some(path) if !path.is_empty() => { - let Some(project_path) = self.project.read(cx).find_project_path(&path, cx) else { - return Task::ready(Err(format!("Could not find path {path} in project"))); - }; - - let open_buffer_task = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + match input.path { + Some(path) if !path.is_empty() => { + let (_project_path, open_buffer_task) = project.update(cx, |project, cx| { + let Some(project_path) = project.find_project_path(&path, cx) else { + return Err(format!("Could not find path {path} in project")); + }; + let task = project.open_buffer(project_path.clone(), cx); + Ok((project_path, task)) + })?; - cx.spawn(async move |cx| { let buffer = futures::select! 
{ result = open_buffer_task.fuse() => result.map_err(|e| e.to_string())?, _ = event_stream.cancelled_by_user().fuse() => { @@ -135,36 +141,40 @@ impl AgentTool for DiagnosticsTool { } else { Ok(output) } - }) - } - _ => { - let project = self.project.read(cx); - let mut output = String::new(); - let mut has_diagnostics = false; - - for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { - if summary.error_count > 0 || summary.warning_count > 0 { - let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) - else { - continue; - }; - - has_diagnostics = true; - output.push_str(&format!( - "{}: {} error(s), {} warning(s)\n", - worktree.read(cx).absolutize(&project_path.path).display(), - summary.error_count, - summary.warning_count - )); - } } + _ => { + let (output, has_diagnostics) = project.read_with(cx, |project, cx| { + let mut output = String::new(); + let mut has_diagnostics = false; + + for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { + if summary.error_count > 0 || summary.warning_count > 0 { + let Some(worktree) = + project.worktree_for_id(project_path.worktree_id, cx) + else { + continue; + }; + + has_diagnostics = true; + output.push_str(&format!( + "{}: {} error(s), {} warning(s)\n", + worktree.read(cx).absolutize(&project_path.path).display(), + summary.error_count, + summary.warning_count + )); + } + } + + (output, has_diagnostics) + }); - if has_diagnostics { - Task::ready(Ok(output)) - } else { - Task::ready(Ok("No errors or warnings found in the project.".into())) + if has_diagnostics { + Ok(output) + } else { + Ok("No errors or warnings found in the project.".into()) + } } } - } + }) } } diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 788bf06529a6f0b87242379ffcdb83f38e4c7126..29b08ac09db4417123403fd3915b8575791b2a4e 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -2,8 +2,8 @@ use 
super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; use super::tool_permissions::authorize_file_edit; use crate::{ - AgentTool, Templates, Thread, ToolCallEventStream, - edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, + AgentTool, Templates, Thread, ToolCallEventStream, ToolInput, + edit_agent::{EditAgent, EditAgentOutputEvent, EditFormat}, }; use acp_thread::Diff; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; @@ -104,8 +104,6 @@ pub enum EditFileToolOutput { old_text: Arc, #[serde(default)] diff: String, - #[serde(alias = "raw_output")] - edit_agent_output: EditAgentOutput, }, Error { error: String, @@ -237,39 +235,47 @@ impl AgentTool for EditFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Ok(project) = self - .thread - .read_with(cx, |thread, _cx| thread.project().clone()) - else { - return Task::ready(Err(EditFileToolOutput::Error { - error: "thread was dropped".to_string(), - })); - }; - let project_path = match resolve_path(&input, project.clone(), cx) { - Ok(path) => path, - Err(err) => { - return Task::ready(Err(EditFileToolOutput::Error { - error: err.to_string(), - })); - } - }; - let abs_path = project.read(cx).absolute_path(&project_path, cx); - if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]), - ); - } - let allow_thinking = self - .thread - .read_with(cx, |thread, _cx| thread.thinking_enabled()) - .unwrap_or(true); - - let authorize = self.authorize(&input, &event_stream, cx); cx.spawn(async move |cx: &mut AsyncApp| { + let input = input.recv().await.map_err(|e| EditFileToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let project = self + .thread + .read_with(cx, |thread, _cx| thread.project().clone()) + 
.map_err(|_| EditFileToolOutput::Error { + error: "thread was dropped".to_string(), + })?; + + let (project_path, abs_path, allow_thinking, update_agent_location, authorize) = + cx.update(|cx| { + let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| { + EditFileToolOutput::Error { + error: err.to_string(), + } + })?; + let abs_path = project.read(cx).absolute_path(&project_path, cx); + if let Some(abs_path) = abs_path.clone() { + event_stream.update_fields( + ToolCallUpdateFields::new() + .locations(vec![acp::ToolCallLocation::new(abs_path)]), + ); + } + let allow_thinking = self + .thread + .read_with(cx, |thread, _cx| thread.thinking_enabled()) + .unwrap_or(true); + + let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); + + let authorize = self.authorize(&input, &event_stream, cx); + Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, update_agent_location, authorize)) + })?; + let result: anyhow::Result = async { authorize.await?; @@ -288,6 +294,7 @@ impl AgentTool for EditFileTool { self.templates.clone(), edit_format, allow_thinking, + update_agent_location, ); let buffer = project @@ -298,13 +305,13 @@ impl AgentTool for EditFileTool { // Check if the file has been modified since the agent last read it if let Some(abs_path) = abs_path.as_ref() { - let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); + let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path)); + let (current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| { let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime()); let dirty = buffer.read(cx).is_dirty(); let has_save = thread.has_tool(SaveFileTool::NAME); let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - 
(last_read, current, dirty, has_save, has_restore) + (current, dirty, has_save, has_restore) })?; // Check for unsaved changes first - these indicate modifications we don't know about @@ -427,7 +434,7 @@ impl AgentTool for EditFileTool { } } - let edit_agent_output = output.await?; + output.await?; let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { let settings = language_settings::language_settings( @@ -463,17 +470,6 @@ impl AgentTool for EditFileTool { log.buffer_edited(buffer.clone(), cx); }); - // Update the recorded read time after a successful edit so consecutive edits work - if let Some(abs_path) = abs_path.as_ref() { - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - self.thread.update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); - })?; - } - } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ @@ -519,7 +515,6 @@ impl AgentTool for EditFileTool { new_text, old_text, diff: unified_diff, - edit_agent_output, }) }.await; result @@ -672,7 +667,11 @@ mod tests { language_registry, Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!( @@ -881,7 +880,11 @@ mod tests { language_registry.clone(), Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the unformatted content @@ -940,7 +943,11 @@ mod tests { language_registry, Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the unformatted content @@ -1027,7 +1034,11 @@ mod tests { language_registry.clone(), Templates::new(), )) - .run(input, 
ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the content with trailing whitespace @@ -1082,7 +1093,11 @@ mod tests { language_registry, Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the content with trailing whitespace @@ -2081,11 +2096,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2111,11 +2126,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2139,11 +2154,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2186,24 +2201,28 @@ mod tests { let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); // Initially, file_read_times should be empty - let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty()); + let is_empty = action_log.read_with(cx, |action_log, _| { + action_log + .file_read_time(path!("/root/test.txt").as_ref()) + .is_none() + }); assert!(is_empty, "file_read_times should start empty"); // Create read tool let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), project.clone(), - action_log, + action_log.clone(), 
+ true, )); // Read the file to record the read time cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2212,12 +2231,9 @@ mod tests { .unwrap(); // Verify that file_read_times now contains an entry for the file - let has_entry = thread.read_with(cx, |thread, _| { - thread.file_read_times.len() == 1 - && thread - .file_read_times - .keys() - .any(|path| path.ends_with("test.txt")) + let has_entry = action_log.read_with(cx, |log, _| { + log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() }); assert!( has_entry, @@ -2227,11 +2243,11 @@ mod tests { // Read the file again - should update the entry cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2239,11 +2255,14 @@ mod tests { .await .unwrap(); - // Should still have exactly one entry - let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1); + // Should still have an entry after re-reading + let has_entry = action_log.read_with(cx, |log, _| { + log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() + }); assert!( - has_one_entry, - "file_read_times should still have one entry after re-reading" + has_entry, + "file_read_times should still have an entry after re-reading" ); } @@ -2283,11 +2302,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = 
Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2298,11 +2313,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2314,11 +2329,11 @@ mod tests { let edit_result = { let edit_task = cx.update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "First edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2343,11 +2358,11 @@ mod tests { let edit_result = { let edit_task = cx.update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Second edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2397,11 +2412,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2412,11 +2423,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2456,11 +2467,11 @@ mod tests { let result = cx .update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit after external change".into(), path: 
"root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2508,11 +2519,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2523,11 +2530,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2560,11 +2567,11 @@ mod tests { let result = cx .update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit with dirty buffer".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) diff --git a/crates/agent/src/tools/fetch_tool.rs b/crates/agent/src/tools/fetch_tool.rs index e573c2202b09d1283d75c3eda20b65be1bcd82a7..75880801595ad0604c9f3a1fac58bd916809a8ba 100644 --- a/crates/agent/src/tools/fetch_tool.rs +++ b/crates/agent/src/tools/fetch_tool.rs @@ -16,7 +16,8 @@ use ui::SharedString; use util::markdown::{MarkdownEscaped, MarkdownInlineCode}; use crate::{ - AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings, + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, + decide_permission_from_settings, }; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -141,41 +142,52 @@ impl AgentTool for FetchTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) 
-> Task> { - let settings = AgentSettings::get_global(cx); - let decision = - decide_permission_from_settings(Self::NAME, std::slice::from_ref(&input.url), settings); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]); - Some(event_stream.authorize( - format!("Fetch {}", MarkdownInlineCode(&input.url)), - context, - cx, - )) - } - }; + let http_client = self.http_client.clone(); + cx.spawn(async move |cx| { + let input: FetchToolInput = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let decision = cx.update(|cx| { + decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.url), + AgentSettings::get_global(cx), + ) + }); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => Some(cx.update(|cx| { + let context = + crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]); + event_stream.authorize( + format!("Fetch {}", MarkdownInlineCode(&input.url)), + context, + cx, + ) + })), + }; - let fetch_task = cx.background_spawn({ - let http_client = self.http_client.clone(); - async move { - if let Some(authorize) = authorize { - authorize.await?; + let fetch_task = cx.background_spawn({ + let http_client = http_client.clone(); + let url = input.url.clone(); + async move { + if let Some(authorize) = authorize { + authorize.await?; + } + Self::build_message(http_client, &url).await } - Self::build_message(http_client, &input.url).await - } - }); + }); - cx.foreground_executor().spawn(async move { let text = futures::select! 
{ result = fetch_task.fuse() => result.map_err(|e| e.to_string())?, _ = event_stream.cancelled_by_user().fuse() => { diff --git a/crates/agent/src/tools/find_path_tool.rs b/crates/agent/src/tools/find_path_tool.rs index 4ba60c61063c08ac002dc7dc16fa11b987cbab74..9c65461503225171bcda482d58871a94743481e3 100644 --- a/crates/agent/src/tools/find_path_tool.rs +++ b/crates/agent/src/tools/find_path_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use futures::FutureExt as _; @@ -121,13 +121,18 @@ impl AgentTool for FindPathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let search_paths_task = search_paths(&input.glob, self.project.clone(), cx); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input.recv().await.map_err(|e| FindPathToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let search_paths_task = cx.update(|cx| search_paths(&input.glob, project, cx)); - cx.background_spawn(async move { let matches = futures::select! 
{ result = search_paths_task.fuse() => result.map_err(|e| FindPathToolOutput::Error { error: e.to_string() })?, _ = event_stream.cancelled_by_user().fuse() => { diff --git a/crates/agent/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs index 16162107dff84ab40117b7783e04b633d144a214..fbfdc18585b822361effb6fd770e678b3e434a17 100644 --- a/crates/agent/src/tools/grep_tool.rs +++ b/crates/agent/src/tools/grep_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::Result; use futures::{FutureExt as _, StreamExt}; @@ -114,66 +114,64 @@ impl AgentTool for GrepTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { const CONTEXT_LINES: u32 = 2; const MAX_ANCESTOR_LINES: u32 = 10; - let path_style = self.project.read(cx).path_style(cx); - - let include_matcher = match PathMatcher::new( - input - .include_pattern - .as_ref() - .into_iter() - .collect::>(), - path_style, - ) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(format!("invalid include glob pattern: {error}"))); - } - }; - - // Exclude global file_scan_exclusions and private_files settings - let exclude_matcher = { - let global_settings = WorktreeSettings::get_global(cx); - let exclude_patterns = global_settings - .file_scan_exclusions - .sources() - .chain(global_settings.private_files.sources()); - - match PathMatcher::new(exclude_patterns, path_style) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(format!("invalid exclude pattern: {error}"))); - } - } - }; - - let query = match SearchQuery::regex( - &input.regex, - false, - input.case_sensitive, - false, - false, - include_matcher, - exclude_matcher, - true, // Always match file include pattern against *full project paths* that start with a project root. 
- None, - ) { - Ok(query) => query, - Err(error) => return Task::ready(Err(error.to_string())), - }; - - let results = self - .project - .update(cx, |project, cx| project.search(query, cx)); - - let project = self.project.downgrade(); + let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let results = cx.update(|cx| { + let path_style = project.read(cx).path_style(cx); + + let include_matcher = PathMatcher::new( + input + .include_pattern + .as_ref() + .into_iter() + .collect::>(), + path_style, + ) + .map_err(|error| format!("invalid include glob pattern: {error}"))?; + + // Exclude global file_scan_exclusions and private_files settings + let exclude_matcher = { + let global_settings = WorktreeSettings::get_global(cx); + let exclude_patterns = global_settings + .file_scan_exclusions + .sources() + .chain(global_settings.private_files.sources()); + + PathMatcher::new(exclude_patterns, path_style) + .map_err(|error| format!("invalid exclude pattern: {error}"))? + }; + + let query = SearchQuery::regex( + &input.regex, + false, + input.case_sensitive, + false, + false, + include_matcher, + exclude_matcher, + true, // Always match file include pattern against *full project paths* that start with a project root. + None, + ) + .map_err(|error| error.to_string())?; + + Ok::<_, String>( + project.update(cx, |project, cx| project.search(query, cx)), + ) + })?; + + let project = project.downgrade(); // Keep the search alive for the duration of result iteration. Dropping this task is the // cancellation mechanism; we intentionally do not detach it. 
let SearchResults {rx, _task_handle} = results; @@ -787,7 +785,13 @@ mod tests { cx: &mut TestAppContext, ) -> String { let tool = Arc::new(GrepTool { project }); - let task = cx.update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)); + let task = cx.update(|cx| { + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }); match task.await { Ok(result) => { diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 5dddee94904283ccb9198ce56aa4005250b5908a..1a674aaa71fef5bf9c11688e82982a5dbcfee331 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -2,7 +2,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol::ToolKind; use anyhow::{Context as _, Result, anyhow}; use gpui::{App, Entity, SharedString, Task}; @@ -146,34 +146,39 @@ impl AgentTool for ListDirectoryTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - // Sometimes models will return these even though we tell it to give a path and not a glob. - // When this happens, just list the root worktree directories. - if matches!(input.path.as_str(), "." 
| "" | "./" | "*") { - let output = self - .project - .read(cx) - .worktrees(cx) - .filter_map(|worktree| { - let worktree = worktree.read(cx); - let root_entry = worktree.root_entry()?; - if root_entry.is_dir() { - Some(root_entry.path.display(worktree.path_style())) - } else { - None - } - }) - .collect::>() - .join("\n"); - - return Task::ready(Ok(output)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Sometimes models will return these even though we tell it to give a path and not a glob. + // When this happens, just list the root worktree directories. + if matches!(input.path.as_str(), "." | "" | "./" | "*") { + let output = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .filter_map(|worktree| { + let worktree = worktree.read(cx); + let root_entry = worktree.root_entry()?; + if root_entry.is_dir() { + Some(root_entry.path.display(worktree.path_style())) + } else { + None + } + }) + .collect::>() + .join("\n") + }); + + return Ok(output); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -323,7 +328,13 @@ mod tests { path: "project".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!( @@ -344,7 +355,13 @@ mod tests { path: "project/src".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!( @@ -365,7 +382,13 @@ mod tests { path: "project/tests".into(), }; let output = cx - .update(|cx| tool.clone().run(input, 
ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(!output.contains("# Folders:")); @@ -393,7 +416,13 @@ mod tests { path: "project/empty_dir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!(output, "project/empty_dir is empty.\n"); @@ -420,7 +449,13 @@ mod tests { path: "project/nonexistent".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("Path not found")); @@ -429,7 +464,13 @@ mod tests { path: "project/file.txt".into(), }; let output = cx - .update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("is not a directory")); } @@ -493,7 +534,13 @@ mod tests { path: "project".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); @@ -520,7 +567,13 @@ mod tests { path: "project/.secretdir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!( output.unwrap_err().contains("file_scan_exclusions"), @@ -532,7 +585,13 @@ mod tests { path: "project/visible_dir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, 
ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); @@ -637,7 +696,13 @@ mod tests { path: "worktree1/src".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("main.rs"), "Should list main.rs"); @@ -655,7 +720,13 @@ mod tests { path: "worktree1/tests".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("test.rs"), "Should list test.rs"); @@ -669,7 +740,13 @@ mod tests { path: "worktree2/lib".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("public.js"), "Should list public.js"); @@ -687,7 +764,13 @@ mod tests { path: "worktree2/docs".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("README.md"), "Should list README.md"); @@ -701,7 +784,13 @@ mod tests { path: "worktree1/src/secret.rs".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("Cannot list directory"),); } @@ -743,9 +832,9 @@ mod tests { let (event_stream, mut 
event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -804,9 +893,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -871,9 +960,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -924,9 +1013,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/src".into(), - }, + }), event_stream, cx, ) @@ -981,9 +1070,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_dir".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index 4c337d0ec2827ad7c63c87ef206f0e74dc63091f..c246b3c5b0661546f4617bb5521766f9da3839fb 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots, collect_symlink_escapes, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths, +}; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; use futures::FutureExt as _; @@ -92,19 +94,24 @@ impl AgentTool 
for MovePathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let paths = vec![input.source_path.clone(), input.destination_path.clone()]; - let decision = decide_permission_for_paths(Self::NAME, &paths, settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let paths = vec![input.source_path.clone(), input.destination_path.clone()]; + let decision = cx.update(|cx| { + decide_permission_for_paths(Self::NAME, &paths, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -255,7 +262,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -309,7 +316,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; drop(auth); @@ -361,7 +368,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = 
event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -437,7 +444,9 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let result = cx.update(|cx| tool.run(input, event_stream, cx)).await; + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)) + .await; assert!(result.is_err(), "Tool should fail when policy denies"); assert!( diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index 689d70ff20d15cbc56fcc0e14a3b46408647f737..fe1cafe5881d14c9700813f742e1f2df0aa1203e 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -6,7 +6,7 @@ use gpui::{App, SharedString, Task}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; #[derive(Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -48,14 +48,20 @@ impl AgentTool for NowTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - let now = match input.timezone { - Timezone::Utc => Utc::now().to_rfc3339(), - Timezone::Local => Local::now().to_rfc3339(), - }; - Task::ready(Ok(format!("The current datetime is {now}."))) + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let now = match input.timezone { + Timezone::Utc => Utc::now().to_rfc3339(), + Timezone::Local => Local::now().to_rfc3339(), + }; + Ok(format!("The current datetime is {now}.")) + }) } } diff --git a/crates/agent/src/tools/open_tool.rs b/crates/agent/src/tools/open_tool.rs index c0b24efbec6418c437e9e3d14ffb5d40b45c91b0..344a513d10c2d62e4247dd3e47bcdf428586d6f0 100644 --- a/crates/agent/src/tools/open_tool.rs +++ b/crates/agent/src/tools/open_tool.rs @@ 
-2,7 +2,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::AgentTool; +use crate::{AgentTool, ToolInput}; use agent_client_protocol::ToolKind; use futures::FutureExt as _; use gpui::{App, AppContext as _, Entity, SharedString, Task}; @@ -61,16 +61,24 @@ impl AgentTool for OpenTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: crate::ToolCallEventStream, cx: &mut App, ) -> Task> { - // If path_or_url turns out to be a path in the project, make it absolute. - let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx); - let initial_title = self.initial_title(Ok(input.clone()), cx); - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // If path_or_url turns out to be a path in the project, make it absolute. + let (abs_path, initial_title) = cx.update(|cx| { + let abs_path = to_absolute_path(&input.path_or_url, project.clone(), cx); + let initial_title = self.initial_title(Ok(input.clone()), cx); + (abs_path, initial_title) + }); + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index efd33fe5caece4cee4fc02aab8c1a0ebee92f94e..f7a75bc63a1c461b65c3a2e6f74f2c70e0ca15f6 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -2,7 +2,7 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; use futures::FutureExt as _; -use gpui::{App, Entity, SharedString, Task, WeakEntity}; +use gpui::{App, Entity, SharedString, Task}; use indoc::formatdoc; use language::Point; use 
language_model::{LanguageModelImage, LanguageModelToolResultContent}; @@ -21,7 +21,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, Thread, ToolCallEventStream, outline}; +use crate::{AgentTool, ToolCallEventStream, ToolInput, outline}; /// Reads the content of the given file in the project. /// @@ -56,21 +56,21 @@ pub struct ReadFileToolInput { } pub struct ReadFileTool { - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, } impl ReadFileTool { pub fn new( - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, ) -> Self { Self { - thread, project, action_log, + update_agent_location, } } } @@ -114,14 +114,17 @@ impl AgentTool for ReadFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { let project = self.project.clone(); - let thread = self.thread.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(tool_content_err)?; let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -208,7 +211,6 @@ impl AgentTool for ReadFileTool { }); if is_image { - let image_entity: Entity = cx .update(|cx| { self.project.update(cx, |project, cx| { @@ -254,17 +256,6 @@ impl AgentTool for ReadFileTool { return Err(tool_content_err(format!("{file_path} not found"))); } - // Record the file read time and mtime - if let Some(mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - thread - .update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), mtime); - }) - .ok(); - } - let mut anchor = None; // Check if specific line ranges are provided @@ -324,15 +315,17 @@ impl AgentTool for 
ReadFileTool { }; project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: anchor.unwrap_or_else(|| { - text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + if self.update_agent_location { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: anchor.unwrap_or_else(|| { + text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + }), }), - }), - cx, - ); + cx, + ); + } if let Ok(LanguageModelToolResultContent::Text(text)) = &result { let text: &str = text; let markdown = MarkdownCodeBlock { @@ -354,13 +347,10 @@ impl AgentTool for ReadFileTool { #[cfg(test)] mod test { use super::*; - use crate::{ContextServerRegistry, Templates, Thread}; use agent_client_protocol as acp; use fs::Fs as _; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; - use language_model::fake_provider::FakeLanguageModel; use project::{FakeFs, Project}; - use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; use std::path::PathBuf; @@ -375,20 +365,7 @@ mod test { fs.insert_tree(path!("/root"), json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); let result = cx @@ -398,7 +375,7 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream, cx) + 
tool.run(ToolInput::resolved(input), event_stream, cx) }) .await; assert_eq!( @@ -421,20 +398,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -442,7 +406,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "This is a small file content".into()); @@ -464,20 +432,7 @@ mod test { let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(language::rust_lang()); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -485,7 +440,11 @@ mod test { start_line: None, end_line: None, }; - 
tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -510,7 +469,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -549,20 +512,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -570,7 +520,11 @@ mod test { start_line: Some(2), end_line: Some(4), }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 2\nLine 3\nLine 4\n".into()); @@ -590,20 +544,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = 
Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // start_line of 0 should be treated as 1 let result = cx @@ -613,7 +554,11 @@ mod test { start_line: Some(0), end_line: Some(2), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 1\nLine 2\n".into()); @@ -626,7 +571,11 @@ mod test { start_line: Some(1), end_line: Some(0), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 1\n".into()); @@ -639,7 +588,11 @@ mod test { start_line: Some(3), end_line: Some(2), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 3\n".into()); @@ -721,20 +674,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // Reading a file outside the project worktree should fail let result = cx @@ -744,7 +684,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + 
ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -760,7 +704,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -776,7 +724,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -791,7 +743,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -807,7 +763,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -822,7 +782,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -837,7 +801,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -853,7 +821,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!(result.is_ok(), "Should be able to read normal files"); @@ -867,7 +839,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + 
ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -893,29 +869,16 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let read_task = cx.update(|cx| { tool.run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "root/secret.png".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1012,24 +975,7 @@ mod test { .await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log.clone(), - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone(), true)); // Test reading allowed files in worktree1 let result = cx @@ -1039,7 +985,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await 
.unwrap(); @@ -1057,7 +1007,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1075,7 +1029,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1093,7 +1051,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -1111,7 +1073,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1129,7 +1095,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1148,7 +1118,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1188,33 +1162,16 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - 
project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1264,33 +1221,16 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1344,34 +1284,17 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), 
action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let result = cx .update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index 304e0d1180fe626482206bfdc2dfa6d53f529816..c1aa8690a840ea6911dcb94c26c8cef3cb5f313d 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -17,7 +17,9 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use util::markdown::MarkdownInlineCode; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; /// Discards unsaved changes in open buffers by reloading file contents from disk. /// @@ -66,25 +68,31 @@ impl AgentTool for RestoreFileFromDiskTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx).clone(); - - // Check for any immediate deny before spawning async work. - for path in &input.paths { - let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - } - let project = self.project.clone(); - let input_paths = input.paths; cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Check for any immediate deny before doing async work. 
+ for path in &input.paths { + let path_str = path.to_string_lossy(); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + } + + let input_paths = input.paths; + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -92,7 +100,9 @@ impl AgentTool for RestoreFileFromDiskTool { for path in &input_paths { let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); let symlink_escape = project.read_with(cx, |project, cx| { path_has_symlink_escape(project, path, &canonical_roots, cx) }); @@ -378,12 +388,12 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![ PathBuf::from("root/dirty.txt"), PathBuf::from("root/clean.txt"), ], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -428,7 +438,7 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { paths: vec![] }, + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![] }), ToolCallEventStream::test().0, cx, ) @@ -441,9 +451,9 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("nonexistent/path.txt")], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -495,9 +505,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: 
vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -564,9 +574,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -623,9 +633,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 20140c77d113d96c741d5afbe672882f708870d6..99e937b9dff2a1b4781dde16bd2bf6d64edd25ad 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -17,7 +17,9 @@ use super::tool_permissions::{ canonicalize_worktree_roots, path_has_symlink_escape, resolve_project_path, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; /// Saves files that have unsaved changes. /// @@ -63,25 +65,31 @@ impl AgentTool for SaveFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx).clone(); - - // Check for any immediate deny before spawning async work. 
- for path in &input.paths { - let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - } - let project = self.project.clone(); - let input_paths = input.paths; cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Check for any immediate deny before doing async work. + for path in &input.paths { + let path_str = path.to_string_lossy(); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + } + + let input_paths = input.paths; + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -89,7 +97,9 @@ impl AgentTool for SaveFileTool { for path in &input_paths { let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); let symlink_escape = project.read_with(cx, |project, cx| { path_has_symlink_escape(project, path, &canonical_roots, cx) }); @@ -382,12 +392,12 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![ PathBuf::from("root/dirty.txt"), PathBuf::from("root/clean.txt"), ], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -425,7 +435,7 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { paths: vec![] }, + ToolInput::resolved(SaveFileToolInput { paths: vec![] }), ToolCallEventStream::test().0, cx, ) @@ -438,9 +448,9 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - 
SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("nonexistent/path.txt")], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -490,9 +500,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -559,9 +569,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -618,9 +628,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -702,12 +712,12 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![ PathBuf::from("project/dirty.txt"), PathBuf::from("project/link.txt"), ], - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index e2dd78d4476de48465cb5c48e225e2ae5a0a7767..9c10b2fbf127c42d842300f4af865c4297cdedb8 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -1,77 +1,93 @@ -use acp_thread::SUBAGENT_SESSION_ID_META_KEY; +use acp_thread::{SUBAGENT_SESSION_INFO_META_KEY, SubagentSessionInfo}; use agent_client_protocol as acp; use anyhow::Result; -use gpui::{App, SharedString, Task, WeakEntity}; +use gpui::{App, SharedString, Task}; use language_model::LanguageModelToolResultContent; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::rc::Rc; use 
std::sync::Arc; -use crate::{AgentTool, Thread, ThreadEnvironment, ToolCallEventStream}; +use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput}; /// Spawns an agent to perform a delegated task. /// -/// Use this tool when you want to do any of the following: -/// - Run multiple tasks in parallel that would take significantly longer to run sequentially. -/// - Complete a self-contained task where you need to know if it succeeded or failed (and how), but none of its intermediate output. -/// - Perform an investigation where all you need to know is the outcome, not the research that led to that outcome. +/// Use this tool when you want to: +/// - Run multiple tasks in parallel. +/// - Delegate a self-contained task where you only need the final outcome. /// -/// You control what the agent does by providing a prompt describing what the agent should do. The agent has access to the same tools you do, but does NOT see your conversation history or any context the user attached. You must include all relevant context (file paths, requirements, constraints) in the prompt. +/// Do NOT use this tool for tasks you could accomplish directly with one or two tool calls (e.g. reading a file, running a single command). /// /// You will receive only the agent's final message as output. /// -/// If a response (success or error) includes a session_id, you can send a follow-up message to that session by passing the session_id back. This is useful for multi-turn conversations with an agent, asking clarifying questions about its output, or retrying after timeouts or transient failures. +/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. +/// +/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. 
The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry". /// -/// Note: -/// - Agents cannot use tools you don't have access to. /// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories). #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] pub struct SpawnAgentToolInput { /// Short label displayed in the UI while the agent runs (e.g., "Researching alternatives") pub label: String, - /// Describe the task for the agent to perform. Be specific about what you want accomplished. Include all necessary context (file paths, requirements, constraints) since the agent cannot see your conversation. + /// The prompt for the agent. For new sessions, include full context needed for the task. For follow-ups (with session_id), you can rely on the agent already having the previous message. pub message: String, - /// Optional session ID of an existing agent session to continue a conversation with. When provided, the message is sent as a follow-up to that session instead of creating a new one. Use this to ask clarifying questions, request changes based on previous output, or retry after errors. + /// Session ID of an existing agent session to continue instead of creating a new one. 
#[serde(default)] pub session_id: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] +#[serde(rename_all = "snake_case")] pub enum SpawnAgentToolOutput { Success { session_id: acp::SessionId, output: String, + session_info: SubagentSessionInfo, }, Error { #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] session_id: Option, error: String, + session_info: Option, }, } impl From for LanguageModelToolResultContent { fn from(output: SpawnAgentToolOutput) -> Self { - serde_json::to_string(&output) + match output { + SpawnAgentToolOutput::Success { + session_id, + output, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "output": output }), + ) .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) - .into() + .into(), + SpawnAgentToolOutput::Error { + session_id, + error, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "error": error }), + ) + .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) + .into(), + } } } /// Tool that spawns an agent thread to work on a task. 
pub struct SpawnAgentTool { - parent_thread: WeakEntity, environment: Rc, } impl SpawnAgentTool { - pub fn new(parent_thread: WeakEntity, environment: Rc) -> Self { - Self { - parent_thread, - environment, - } + pub fn new(environment: Rc) -> Self { + Self { environment } } } @@ -90,69 +106,108 @@ impl AgentTool for SpawnAgentTool { input: Result, _cx: &mut App, ) -> SharedString { - input - .map(|i| i.label.into()) - .unwrap_or_else(|_| "Spawning agent".into()) + match input { + Ok(i) => i.label.into(), + Err(value) => value + .get("label") + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) + .unwrap_or_else(|| "Spawning agent".into()), + } } fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Some(parent_thread_entity) = self.parent_thread.upgrade() else { - return Task::ready(Err(SpawnAgentToolOutput::Error { - session_id: None, - error: "Parent thread no longer exists".to_string(), - })); - }; - - let subagent = if let Some(session_id) = input.session_id { - self.environment - .resume_subagent(parent_thread_entity, session_id, input.message, cx) - } else { - self.environment - .create_subagent(parent_thread_entity, input.label, input.message, cx) - }; - let subagent = match subagent { - Ok(subagent) => subagent, - Err(err) => { - return Task::ready(Err(SpawnAgentToolOutput::Error { + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| SpawnAgentToolOutput::Error { + session_id: None, + error: format!("Failed to receive tool input: {e}"), + session_info: None, + })?; + + let (subagent, mut session_info) = cx.update(|cx| { + let subagent = if let Some(session_id) = input.session_id { + self.environment.resume_subagent(session_id, cx) + } else { + self.environment.create_subagent(input.label, cx) + }; + let subagent = subagent.map_err(|err| SpawnAgentToolOutput::Error { session_id: None, error: err.to_string(), - })); - } - }; - let 
subagent_session_id = subagent.id(); - - event_stream.subagent_spawned(subagent_session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - subagent_session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); - - cx.spawn(async move |cx| match subagent.wait_for_output(cx).await { - Ok(output) => { - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), - ); - Ok(SpawnAgentToolOutput::Success { - session_id: subagent_session_id, - output, - }) - } - Err(e) => { - let error = e.to_string(); - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]), + session_info: None, + })?; + let session_info = SubagentSessionInfo { + session_id: subagent.id(), + message_start_index: subagent.num_entries(cx), + message_end_index: None, + }; + + event_stream.subagent_spawned(subagent.id()); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new(), + Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])), ); - Err(SpawnAgentToolOutput::Error { - session_id: Some(subagent_session_id), - error, - }) - } + + Ok((subagent, session_info)) + })?; + + let send_result = subagent.send(input.message, cx).await; + + let status = if send_result.is_ok() { + "completed" + } else { + "error" + }; + telemetry::event!( + "Subagent Completed", + subagent_session = session_info.session_id.to_string(), + status, + ); + + session_info.message_end_index = + cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); + + let meta = Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])); + + let (output, result) = match send_result { + Ok(output) => ( + output.clone(), + Ok(SpawnAgentToolOutput::Success { + session_id: session_info.session_id.clone(), + session_info, + output, + 
}), + ), + Err(e) => { + let error = e.to_string(); + ( + error.clone(), + Err(SpawnAgentToolOutput::Error { + session_id: Some(session_info.session_id.clone()), + error, + session_info: Some(session_info), + }), + ) + } + }; + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![output.into()]), + meta, + ); + result }) } @@ -163,25 +218,29 @@ impl AgentTool for SpawnAgentTool { event_stream: ToolCallEventStream, _cx: &mut App, ) -> Result<()> { - let session_id = match &output { - SpawnAgentToolOutput::Success { session_id, .. } => Some(session_id), - SpawnAgentToolOutput::Error { session_id, .. } => session_id.as_ref(), + let (content, session_info) = match output { + SpawnAgentToolOutput::Success { + output, + session_info, + .. + } => (output.into(), Some(session_info)), + SpawnAgentToolOutput::Error { + error, + session_info, + .. + } => (error.into(), session_info), }; - if let Some(session_id) = session_id { - event_stream.subagent_spawned(session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); - } - - let content = match &output { - SpawnAgentToolOutput::Success { output, .. } => output.into(), - SpawnAgentToolOutput::Error { error, .. 
} => error.into(), - }; - event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![content])); + let meta = session_info.map(|session_info| { + acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )]) + }); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![content]), + meta, + ); Ok(()) } diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index dd5445142a001fbd9106af548444165bc8331581..81846ec282a52cc694a0f1c8e8418b5202d7e0d6 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -1,30 +1,36 @@ use super::edit_file_tool::EditFileTool; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; +use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; use crate::{ - AgentTool, Templates, Thread, ToolCallEventStream, - edit_agent::streaming_fuzzy_matcher::StreamingFuzzyMatcher, + AgentTool, Thread, ToolCallEventStream, ToolInput, + edit_agent::{ + reindent::{Reindenter, compute_indent_delta}, + streaming_fuzzy_matcher::StreamingFuzzyMatcher, + }, }; use acp_thread::Diff; +use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use collections::HashSet; use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; -use language::LanguageRegistry; use language::language_settings::{self, FormatOnSave}; +use language::{Buffer, LanguageRegistry}; use language_model::LanguageModelToolResultContent; use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use project::{Project, ProjectPath}; +use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use 
std::ops::Range; use std::path::PathBuf; use std::sync::Arc; -use text::BufferSnapshot; +use streaming_diff::{CharOperation, StreamingDiff}; +use text::ToOffset; use ui::SharedString; -use util::ResultExt; use util::rel_path::RelPath; +use util::{Deferred, ResultExt}; const DEFAULT_UI_TEXT: &str = "Editing file"; @@ -70,14 +76,13 @@ pub struct StreamingEditFileToolInput { pub path: PathBuf, /// The mode of operation on the file. Possible values: - /// - 'create': Create a new file if it doesn't exist. Requires 'content' field. - /// - 'overwrite': Replace the entire contents of an existing file. Requires 'content' field. + /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field. /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. /// /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. pub mode: StreamingEditFileMode, - /// The complete content for the new file (required for 'create' and 'overwrite' modes). + /// The complete content for the new file (required for 'write' mode). /// This field should contain the entire file content. #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option, @@ -85,23 +90,22 @@ pub struct StreamingEditFileToolInput { /// List of edit operations to apply sequentially (required for 'edit' mode). /// Each edit finds `old_text` in the file and replaces it with `new_text`. 
#[serde(default, skip_serializing_if = "Option::is_none")] - pub edits: Option>, + pub edits: Option>, } -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StreamingEditFileMode { - /// Create a new file if it doesn't exist - Create, - /// Replace the entire contents of an existing file - Overwrite, + /// Overwrite the file with new content (replacing any existing content). + /// If the file does not exist, it will be created. + Write, /// Make granular edits to an existing file Edit, } /// A single edit operation that replaces old text with new text #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditOperation { +pub struct Edit { /// The exact text to find in the file. This will be matched using fuzzy matching /// to handle minor differences in whitespace or formatting. pub old_text: String, @@ -109,12 +113,26 @@ pub struct EditOperation { pub new_text: String, } -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Default, Debug, Deserialize)] struct StreamingEditFileToolPartialInput { #[serde(default)] - path: String, + display_description: Option, + #[serde(default)] + path: Option, + #[serde(default)] + mode: Option, + #[serde(default)] + content: Option, + #[serde(default)] + edits: Option>, +} + +#[derive(Default, Debug, Deserialize)] +pub struct PartialEdit { + #[serde(default)] + pub old_text: Option, #[serde(default)] - display_description: String, + pub new_text: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -133,6 +151,14 @@ pub enum StreamingEditFileToolOutput { }, } +impl StreamingEditFileToolOutput { + pub fn error(error: impl Into) -> Self { + Self::Error { + error: error.into(), + } + } +} + impl std::fmt::Display for StreamingEditFileToolOutput { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -161,52 +187,55 @@ impl From for 
LanguageModelToolResultContent { } pub struct StreamingEditFileTool { + project: Entity, thread: WeakEntity, + action_log: Entity, language_registry: Arc, - project: Entity, - #[allow(dead_code)] - templates: Arc, } impl StreamingEditFileTool { pub fn new( project: Entity, thread: WeakEntity, + action_log: Entity, language_registry: Arc, - templates: Arc, ) -> Self { Self { project, thread, + action_log, language_registry, - templates, - } - } - - pub fn with_thread(&self, new_thread: WeakEntity) -> Self { - Self { - project: self.project.clone(), - thread: new_thread, - language_registry: self.language_registry.clone(), - templates: self.templates.clone(), } } fn authorize( &self, - input: &StreamingEditFileToolInput, + path: &PathBuf, + description: &str, event_stream: &ToolCallEventStream, cx: &mut App, ) -> Task> { super::tool_permissions::authorize_file_edit( EditFileTool::NAME, - &input.path, - &input.display_description, + path, + description, &self.thread, event_stream, cx, ) } + + fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { + let should_update_agent_location = self + .thread + .read_with(cx, |thread, _cx| !thread.is_subagent()) + .unwrap_or_default(); + if should_update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location(Some(AgentLocation { buffer, position }), cx); + }); + } + } } impl AgentTool for StreamingEditFileTool { @@ -215,6 +244,10 @@ impl AgentTool for StreamingEditFileTool { const NAME: &'static str = "streaming_edit_file"; + fn supports_input_streaming() -> bool { + true + } + fn kind() -> acp::ToolKind { acp::ToolKind::Edit } @@ -237,25 +270,27 @@ impl AgentTool for StreamingEditFileTool { .unwrap_or(input.path.to_string_lossy().into_owned()) .into(), Err(raw_input) => { - if let Some(input) = - serde_json::from_value::(raw_input).ok() + if let Ok(input) = + serde_json::from_value::(raw_input) { - let path = input.path.trim(); + let path = 
input.path.unwrap_or_default(); + let path = path.trim(); if !path.is_empty() { return self .project .read(cx) - .find_project_path(&input.path, cx) + .find_project_path(&path, cx) .and_then(|project_path| { self.project .read(cx) .short_full_path_for_project_path(&project_path, cx) }) - .unwrap_or(input.path) + .unwrap_or_else(|| path.to_string()) .into(); } - let description = input.display_description.trim(); + let description = input.display_description.unwrap_or_default(); + let description = description.trim(); if !description.is_empty() { return description.to_string().into(); } @@ -268,227 +303,93 @@ impl AgentTool for StreamingEditFileTool { fn run( self: Arc, - input: Self::Input, + mut input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Ok(project) = self - .thread - .read_with(cx, |thread, _cx| thread.project().clone()) - else { - return Task::ready(Err(StreamingEditFileToolOutput::Error { - error: "thread was dropped".to_string(), - })); - }; - - let project_path = match resolve_path(&input, project.clone(), cx) { - Ok(path) => path, - Err(err) => { - return Task::ready(Err(StreamingEditFileToolOutput::Error { - error: err.to_string(), - })); - } - }; - - let abs_path = project.read(cx).absolute_path(&project_path, cx); - if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]), - ); - } - - let authorize = self.authorize(&input, &event_stream, cx); - cx.spawn(async move |cx: &mut AsyncApp| { - let result: anyhow::Result = async { - authorize.await?; - - let buffer = project - .update(cx, |project, cx| { - project.open_buffer(project_path.clone(), cx) - }) - .await?; - - if let Some(abs_path) = abs_path.as_ref() { - let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = - self.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); - let current = 
buffer - .read(cx) - .file() - .and_then(|file| file.disk_state().mtime()); - let dirty = buffer.read(cx).is_dirty(); - let has_save = thread.has_tool(SaveFileTool::NAME); - let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (last_read, current, dirty, has_save, has_restore) - })?; - - if is_dirty { - let message = match (has_save_tool, has_restore_tool) { - (true, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (true, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." - } - (false, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (false, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ - then ask them to save or revert the file manually and inform you when it's ok to proceed." + let mut state: Option = None; + loop { + futures::select! 
{ + partial = input.recv_partial().fuse() => { + let Some(partial_value) = partial else { break }; + if let Ok(parsed) = serde_json::from_value::(partial_value) { + if state.is_none() + && let StreamingEditFileToolPartialInput { + path: Some(path), + display_description: Some(display_description), + mode: Some(mode), + .. + } = &parsed + { + match EditSession::new( + &PathBuf::from(path), + display_description, + *mode, + &self, + &event_stream, + cx, + ) + .await + { + Ok(session) => state = Some(session), + Err(e) => { + log::error!("Failed to create edit session: {}", e); + return Err(e); + } + } } - }; - anyhow::bail!("{}", message); - } - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { - if current != last_read { - anyhow::bail!( - "The file {} has been modified since you last read it. \ - Please read the file again to get the current state before editing it.", - input.path.display() - ); + if let Some(state) = &mut state { + if let Err(e) = state.process(parsed, &self, &event_stream, cx) { + log::error!("Failed to process edit: {}", e); + return Err(e); + } + } } } - } - - let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); - event_stream.update_diff(diff.clone()); - let _finalize_diff = util::defer({ - let diff = diff.downgrade(); - let mut cx = cx.clone(); - move || { - diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); - } - }); - - let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let old_text = cx - .background_spawn({ - let old_snapshot = old_snapshot.clone(); - async move { Arc::new(old_snapshot.text()) } - }) - .await; - - let action_log = self.thread.read_with(cx, |thread, _cx| thread.action_log().clone())?; - - // Edit the buffer and report edits to the action log as part of the - // same effect cycle, otherwise the edit will be reported as if the - // user made it (due to the buffer subscription in action_log). 
- match input.mode { - StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => { - action_log.update(cx, |log, cx| { - log.buffer_created(buffer.clone(), cx); - }); - let content = input.content.ok_or_else(|| { - anyhow!("'content' field is required for create and overwrite modes") - })?; - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(0..buffer.len(), content.as_str())], None, cx); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - StreamingEditFileMode::Edit => { - action_log.update(cx, |log, cx| { - log.buffer_read(buffer.clone(), cx); - }); - let edits = input.edits.ok_or_else(|| { - anyhow!("'edits' field is required for edit mode") - })?; - // apply_edits now handles buffer_edited internally in the same effect cycle - apply_edits(&buffer, &action_log, &edits, &diff, &event_stream, &abs_path, cx)?; - } - } - - let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); - settings.format_on_save != FormatOnSave::Off - }); - - if format_on_save_enabled { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - - let format_task = project.update(cx, |project, cx| { - project.format( - HashSet::from_iter([buffer.clone()]), - LspFormatTarget::Buffers, - false, - FormatTrigger::Save, - cx, - ) - }); - futures::select! { - result = format_task.fuse() => { result.log_err(); }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); - } - }; - } - - let save_task = project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); - futures::select! 
{ - result = save_task.fuse() => { result?; }, _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); } - }; - - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - - if let Some(abs_path) = abs_path.as_ref() { - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - self.thread.update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); - })?; + } + } + let full_input = + input + .recv() + .await + .map_err(|e| { + let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}")); + log::error!("Failed to receive tool input: {e}"); + err + })?; + + let mut state = if let Some(state) = state { + state + } else { + match EditSession::new( + &full_input.path, + &full_input.display_description, + full_input.mode, + &self, + &event_stream, + cx, + ) + .await + { + Ok(session) => session, + Err(e) => { + log::error!("Failed to create edit session: {}", e); + return Err(e); } } - - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let (new_text, unified_diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - (new_text, diff) - } - }) - .await; - - let output = StreamingEditFileToolOutput::Success { - input_path: input.path, - new_text, - old_text, - diff: unified_diff, - }; - - Ok(output) - }.await; - result - .map_err(|e| StreamingEditFileToolOutput::Error { error: e.to_string() }) + }; + match state.finalize(full_input, &self, &event_stream, cx).await { + Ok(output) => Ok(output), + Err(e) => { + log::error!("Failed to finalize edit: {}", e); + Err(e) + } + } }) } @@ -522,173 +423,632 @@ impl AgentTool for 
StreamingEditFileTool { } } -fn apply_edits( - buffer: &Entity, - action_log: &Entity, - edits: &[EditOperation], - diff: &Entity, - event_stream: &ToolCallEventStream, - abs_path: &Option, - cx: &mut AsyncApp, -) -> Result<()> { - let mut failed_edits = Vec::new(); - let mut ambiguous_edits = Vec::new(); - let mut resolved_edits: Vec<(Range, String)> = Vec::new(); - - // First pass: resolve all edits without applying them - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - for (index, edit) in edits.iter().enumerate() { - let result = resolve_edit(&snapshot, edit); - - match result { - Ok(Some((range, new_text))) => { - // Reveal the range in the diff view - let (start_anchor, end_anchor) = buffer.read_with(cx, |buffer, _cx| { - ( - buffer.anchor_before(range.start), - buffer.anchor_after(range.end), - ) - }); - diff.update(cx, |card, cx| { - card.reveal_range(start_anchor..end_anchor, cx) - }); - resolved_edits.push((range, new_text)); - } - Ok(None) => { - failed_edits.push(index); - } - Err(ranges) => { - ambiguous_edits.push((index, ranges)); - } +pub struct EditSession { + abs_path: PathBuf, + buffer: Entity, + old_text: Arc, + diff: Entity, + mode: StreamingEditFileMode, + parser: ToolEditParser, + pipeline: EditPipeline, + _finalize_diff_guard: Deferred>, +} + +struct EditPipeline { + current_edit: Option, + content_written: bool, +} + +enum EditPipelineEntry { + ResolvingOldText { + matcher: StreamingFuzzyMatcher, + }, + StreamingNewText { + streaming_diff: StreamingDiff, + edit_cursor: usize, + reindenter: Reindenter, + original_snapshot: text::BufferSnapshot, + }, +} + +impl EditPipeline { + fn new() -> Self { + Self { + current_edit: None, + content_written: false, } } - // Check for errors before applying any edits - if !failed_edits.is_empty() { - let indices = failed_edits - .iter() - .map(|i| i.to_string()) - .collect::>() - .join(", "); - anyhow::bail!( - "Could not find matching text for edit(s) at index(es): {}. 
\ - The old_text did not match any content in the file. \ - Please read the file again to get the current content.", - indices - ); + fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) { + if self.current_edit.is_none() { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.current_edit = Some(EditPipelineEntry::ResolvingOldText { + matcher: StreamingFuzzyMatcher::new(snapshot), + }); + } } +} - if !ambiguous_edits.is_empty() { - let details: Vec = ambiguous_edits - .iter() - .map(|(index, ranges)| { - let lines = ranges - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - format!("edit {}: matches at lines {}", index, lines) - }) - .collect(); - anyhow::bail!( - "Some edits matched multiple locations in the file:\n{}. \ - Please provide more context in old_text to uniquely identify the location.", - details.join("\n") +impl EditSession { + async fn new( + path: &PathBuf, + display_description: &str, + mode: StreamingEditFileMode, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result { + let project_path = cx + .update(|cx| resolve_path(mode, &path, &tool.project, cx)) + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + + let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) + else { + return Err(StreamingEditFileToolOutput::error(format!( + "Worktree at '{}' does not exist", + path.to_string_lossy() + ))); + }; + + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path.clone())]), ); + + cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) + .await + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + + let buffer = tool + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .map_err(|e| 
StreamingEditFileToolOutput::error(e.to_string()))?; + + ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; + + let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); + event_stream.update_diff(diff.clone()); + let finalize_diff_guard = util::defer(Box::new({ + let diff = diff.downgrade(); + let mut cx = cx.clone(); + move || { + diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); + } + }) as Box); + + tool.action_log + .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let old_text = cx + .background_spawn({ + let old_snapshot = old_snapshot.clone(); + async move { Arc::new(old_snapshot.text()) } + }) + .await; + + Ok(Self { + abs_path, + buffer, + old_text, + diff, + mode, + parser: ToolEditParser::default(), + pipeline: EditPipeline::new(), + _finalize_diff_guard: finalize_diff_guard, + }) } - // Sort edits by position so buffer.edit() can handle offset translation - let mut edits_sorted = resolved_edits; - edits_sorted.sort_by(|a, b| a.0.start.cmp(&b.0.start)); + async fn finalize( + &mut self, + input: StreamingEditFileToolInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result { + let old_text = self.old_text.clone(); - // Emit location for the earliest edit in the file - if let Some((first_range, _)) = edits_sorted.first() { - if let Some(abs_path) = abs_path.clone() { - let line = snapshot.offset_to_point(first_range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new() - .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), - ); + match input.mode { + StreamingEditFileMode::Write => { + let content = input.content.ok_or_else(|| { + StreamingEditFileToolOutput::error("'content' field is required for write mode") + })?; + + let events = self.parser.finalize_content(&content); + self.process_events(&events, tool, event_stream, cx)?; + + tool.action_log.update(cx, |log, cx| { + 
log.buffer_created(self.buffer.clone(), cx); + }); + } + StreamingEditFileMode::Edit => { + let edits = input.edits.ok_or_else(|| { + StreamingEditFileToolOutput::error("'edits' field is required for edit mode") + })?; + let events = self.parser.finalize_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + + if log::log_enabled!(log::Level::Debug) { + log::debug!("Got edits:"); + for edit in &edits { + log::debug!( + " old_text: '{}', new_text: '{}'", + edit.old_text.replace('\n', "\\n"), + edit.new_text.replace('\n', "\\n") + ); + } + } + } } - } - // Validate no overlaps (sorted ascending by start) - for window in edits_sorted.windows(2) { - if let [(earlier_range, _), (later_range, _)] = window - && (earlier_range.end > later_range.start || earlier_range.start == later_range.start) - { - let earlier_start_line = snapshot.offset_to_point(earlier_range.start).row + 1; - let earlier_end_line = snapshot.offset_to_point(earlier_range.end).row + 1; - let later_start_line = snapshot.offset_to_point(later_range.start).row + 1; - let later_end_line = snapshot.offset_to_point(later_range.end).row + 1; - anyhow::bail!( - "Conflicting edit ranges detected: lines {}-{} conflicts with lines {}-{}. \ - Conflicting edit ranges are not allowed, as they would overwrite each other.", - earlier_start_line, - earlier_end_line, - later_start_line, - later_end_line, + let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::language_settings( + buffer.language().map(|l| l.name()), + buffer.file(), + cx, ); - } - } + settings.format_on_save != FormatOnSave::Off + }); - // Apply all edits in a single batch and report to action_log in the same - // effect cycle. This prevents the buffer subscription from treating these - // as user edits. 
- if !edits_sorted.is_empty() { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit( - edits_sorted - .iter() - .map(|(range, new_text)| (range.clone(), new_text.as_str())), - None, - cx, - ); + if format_on_save_enabled { + tool.action_log.update(cx, |log, cx| { + log.buffer_edited(self.buffer.clone(), cx); }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); + + let format_task = tool.project.update(cx, |project, cx| { + project.format( + HashSet::from_iter([self.buffer.clone()]), + LspFormatTarget::Buffers, + false, + FormatTrigger::Save, + cx, + ) }); + futures::select! { + result = format_task.fuse() => { result.log_err(); }, + _ = event_stream.cancelled_by_user().fuse() => { + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); + } + }; + } + + let save_task = tool.project.update(cx, |project, cx| { + project.save_buffer(self.buffer.clone(), cx) + }); + futures::select! { + result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; }, + _ = event_stream.cancelled_by_user().fuse() => { + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); + } + }; + + tool.action_log.update(cx, |log, cx| { + log.buffer_edited(self.buffer.clone(), cx); }); + + let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let (new_text, unified_diff) = cx + .background_spawn({ + let new_snapshot = new_snapshot.clone(); + let old_text = old_text.clone(); + async move { + let new_text = new_snapshot.text(); + let diff = language::unified_diff(&old_text, &new_text); + (new_text, diff) + } + }) + .await; + + let output = StreamingEditFileToolOutput::Success { + input_path: input.path, + new_text, + old_text: old_text.clone(), + diff: unified_diff, + }; + Ok(output) } - Ok(()) + fn process( + &mut self, + partial: StreamingEditFileToolPartialInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut 
AsyncApp, + ) -> Result<(), StreamingEditFileToolOutput> { + match &self.mode { + StreamingEditFileMode::Write => { + if let Some(content) = &partial.content { + let events = self.parser.push_content(content); + self.process_events(&events, tool, event_stream, cx)?; + } + } + StreamingEditFileMode::Edit => { + if let Some(edits) = partial.edits { + let events = self.parser.push_edits(&edits); + self.process_events(&events, tool, event_stream, cx)?; + } + } + } + Ok(()) + } + + fn process_events( + &mut self, + events: &[ToolEditEvent], + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), StreamingEditFileToolOutput> { + for event in events { + match event { + ToolEditEvent::ContentChunk { chunk } => { + let (buffer_id, buffer_len) = self + .buffer + .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len())); + let edit_range = if self.pipeline.content_written { + buffer_len..buffer_len + } else { + 0..buffer_len + }; + + agent_edit_buffer( + &self.buffer, + [(edit_range, chunk.as_str())], + &tool.action_log, + cx, + ); + cx.update(|cx| { + tool.set_agent_location( + self.buffer.downgrade(), + text::Anchor::max_for_buffer(buffer_id), + cx, + ); + }); + self.pipeline.content_written = true; + } + + ToolEditEvent::OldTextChunk { + chunk, done: false, .. 
+ } => { + log::debug!("old_text_chunk: done=false, chunk='{}'", chunk); + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + && !chunk.is_empty() + { + if let Some(match_range) = matcher.push(chunk, None) { + let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { + buffer.anchor_range_between(match_range.clone()) + }); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(match_range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + + ToolEditEvent::OldTextChunk { + edit_index, + chunk, + done: true, + } => { + log::debug!("old_text_chunk: done=true, chunk='{}'", chunk); + + self.pipeline.ensure_resolving_old_text(&self.buffer, cx); + + let Some(EditPipelineEntry::ResolvingOldText { matcher }) = + &mut self.pipeline.current_edit + else { + continue; + }; + + if !chunk.is_empty() { + matcher.push(chunk, None); + } + let range = extract_match(matcher.finish(), &self.buffer, edit_index, cx)?; + + let anchor_range = self + .buffer + .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); + self.diff + .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + let line = snapshot.offset_to_point(range.start).row; + event_stream.update_fields( + ToolCallUpdateFields::new().locations(vec![ + ToolCallLocation::new(&self.abs_path).line(Some(line)), + ]), + ); + + let buffer_indent = snapshot.line_indent_for_row(line); + let query_indent = text::LineIndent::from_iter( + matcher + .query_lines() + .first() + .map(|s| s.as_str()) + .unwrap_or("") + .chars(), + ); + let indent_delta = compute_indent_delta(buffer_indent, query_indent); + + let old_text_in_buffer = + snapshot.text_for_range(range.clone()).collect::(); + + 
log::debug!( + "edit[{}] old_text matched at {}..{}: {:?}", + edit_index, + range.start, + range.end, + old_text_in_buffer, + ); + + let text_snapshot = self + .buffer + .read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.pipeline.current_edit = Some(EditPipelineEntry::StreamingNewText { + streaming_diff: StreamingDiff::new(old_text_in_buffer), + edit_cursor: range.start, + reindenter: Reindenter::new(indent_delta), + original_snapshot: text_snapshot, + }); + + cx.update(|cx| { + let position = self.buffer.read(cx).anchor_before(range.end); + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: false, .. + } => { + log::debug!("new_text_chunk: done=false, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + streaming_diff, + edit_cursor, + reindenter, + original_snapshot, + .. + }) = &mut self.pipeline.current_edit + else { + continue; + }; + + let reindented = reindenter.push(chunk); + if reindented.is_empty() { + continue; + } + + let char_ops = streaming_diff.push_new(&reindented); + apply_char_operations( + &char_ops, + &self.buffer, + original_snapshot, + edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(*edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + + ToolEditEvent::NewTextChunk { + chunk, done: true, .. + } => { + log::debug!("new_text_chunk: done=true, chunk='{}'", chunk); + + let Some(EditPipelineEntry::StreamingNewText { + mut streaming_diff, + mut edit_cursor, + mut reindenter, + original_snapshot, + }) = self.pipeline.current_edit.take() + else { + continue; + }; + + // Flush any remaining reindent buffer + final chunk. 
+ let mut final_text = reindenter.push(chunk); + final_text.push_str(&reindenter.finish()); + + log::debug!("new_text_chunk: done=true, final_text='{}'", final_text); + + if !final_text.is_empty() { + let char_ops = streaming_diff.push_new(&final_text); + apply_char_operations( + &char_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + } + + let remaining_ops = streaming_diff.finish(); + apply_char_operations( + &remaining_ops, + &self.buffer, + &original_snapshot, + &mut edit_cursor, + &tool.action_log, + cx, + ); + + let position = original_snapshot.anchor_before(edit_cursor); + cx.update(|cx| { + tool.set_agent_location(self.buffer.downgrade(), position, cx); + }); + } + } + } + Ok(()) + } +} + +fn apply_char_operations( + ops: &[CharOperation], + buffer: &Entity, + snapshot: &text::BufferSnapshot, + edit_cursor: &mut usize, + action_log: &Entity, + cx: &mut AsyncApp, +) { + for op in ops { + match op { + CharOperation::Insert { text } => { + let anchor = snapshot.anchor_after(*edit_cursor); + agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx); + } + CharOperation::Delete { bytes } => { + let delete_end = *edit_cursor + bytes; + let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end); + agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); + *edit_cursor = delete_end; + } + CharOperation::Keep { bytes } => { + *edit_cursor += bytes; + } + } + } +} + +fn extract_match( + matches: Vec>, + buffer: &Entity, + edit_index: &usize, + cx: &mut AsyncApp, +) -> Result, StreamingEditFileToolOutput> { + match matches.len() { + 0 => Err(StreamingEditFileToolOutput::error(format!( + "Could not find matching text for edit at index {}. \ + The old_text did not match any content in the file. 
\ + Please read the file again to get the current content.", + edit_index, + ))), + 1 => Ok(matches.into_iter().next().unwrap()), + _ => { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let lines = matches + .iter() + .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) + .collect::>() + .join(", "); + Err(StreamingEditFileToolOutput::error(format!( + "Edit {} matched multiple locations in the file at lines: {}. \ + Please provide more context in old_text to uniquely \ + identify the location.", + edit_index, lines + ))) + } + } } -/// Resolves an edit operation by finding the matching text in the buffer. -/// Returns Ok(Some((range, new_text))) if a unique match is found, -/// Ok(None) if no match is found, or Err(ranges) if multiple matches are found. -fn resolve_edit( - snapshot: &BufferSnapshot, - edit: &EditOperation, -) -> std::result::Result, String)>, Vec>> { - let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); - matcher.push(&edit.old_text, None); - let matches = matcher.finish(); +/// Edits a buffer and reports the edit to the action log in the same effect +/// cycle. This ensures the action log's subscription handler sees the version +/// already updated by `buffer_edited`, so it does not misattribute the agent's +/// edit as a user edit. 
+fn agent_edit_buffer( + buffer: &Entity, + edits: I, + action_log: &Entity, + cx: &mut AsyncApp, +) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, +{ + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); +} - if matches.is_empty() { - return Ok(None); +fn ensure_buffer_saved( + buffer: &Entity, + abs_path: &PathBuf, + tool: &StreamingEditFileTool, + cx: &mut AsyncApp, +) -> Result<(), StreamingEditFileToolOutput> { + let last_read_mtime = tool + .action_log + .read_with(cx, |log, _| log.file_read_time(abs_path)); + let check_result = tool.thread.read_with(cx, |thread, cx| { + let current = buffer + .read(cx) + .file() + .and_then(|file| file.disk_state().mtime()); + let dirty = buffer.read(cx).is_dirty(); + let has_save = thread.has_tool(SaveFileTool::NAME); + let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); + (current, dirty, has_save, has_restore) + }); + + let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { + return Ok(()); + }; + + if is_dirty { + let message = match (has_save_tool, has_restore_tool) { + (true, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (true, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." 
+ } + (false, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." + } + (false, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ + then ask them to save or revert the file manually and inform you when it's ok to proceed." + } + }; + return Err(StreamingEditFileToolOutput::error(message)); } - if matches.len() > 1 { - return Err(matches); + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { + if current != last_read { + return Err(StreamingEditFileToolOutput::error( + "The file has been modified since you last read it. \ + Please read the file again to get the current state before editing it.", + )); + } } - let match_range = matches.into_iter().next().expect("checked len above"); - Ok(Some((match_range, edit.new_text.clone()))) + Ok(()) } fn resolve_path( - input: &StreamingEditFileToolInput, - project: Entity, + mode: StreamingEditFileMode, + path: &PathBuf, + project: &Entity, cx: &mut App, ) -> Result { let project = project.read(cx); - match input.mode { - StreamingEditFileMode::Edit | StreamingEditFileMode::Overwrite => { + match mode { + StreamingEditFileMode::Edit => { let path = project - .find_project_path(&input.path, cx) + .find_project_path(&path, cx) .context("Can't edit file: path not found")?; let entry = project @@ -698,19 +1058,15 @@ fn resolve_path( anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); Ok(path) } - - StreamingEditFileMode::Create => { - if let Some(path) = project.find_project_path(&input.path, cx) { - anyhow::ensure!( - project.entry_for_path(&path, cx).is_none(), - "Can't create file: file already exists" 
- ); + StreamingEditFileMode::Write => { + if let Some(path) = project.find_project_path(&path, cx) + && let Some(entry) = project.entry_for_path(&path, cx) + { + anyhow::ensure!(entry.is_file(), "Can't write to file: path is a directory"); + return Ok(path); } - let parent_path = input - .path - .parent() - .context("Can't create file: incorrect path")?; + let parent_path = path.parent().context("Can't create file: incorrect path")?; let parent_project_path = project.find_project_path(&parent_path, cx); @@ -724,8 +1080,7 @@ fn resolve_path( "Can't create file: parent is not a directory" ); - let file_name = input - .path + let file_name = path .file_name() .and_then(|file_name| file_name.to_str()) .and_then(|file_name| RelPath::unix(file_name).ok()) @@ -744,54 +1099,36 @@ fn resolve_path( #[cfg(test)] mod tests { use super::*; - use crate::{ContextServerRegistry, Templates}; + use crate::{ContextServerRegistry, Templates, ToolInputSender}; + use fs::Fs as _; + use futures::StreamExt as _; use gpui::{TestAppContext, UpdateGlobal}; use language_model::fake_provider::FakeLanguageModel; use prompt_store::ProjectContext; use serde_json::json; + use settings::Settings; use settings::SettingsStore; use util::path; + use util::rel_path::rel_path; #[gpui::test] async fn test_streaming_edit_create_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"dir": {}})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); 
- - let result = cx - .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Create new file".into(), - path: "root/dir/new_file.txt".into(), - mode: StreamingEditFileMode::Create, - content: Some("Hello, World!".into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) - }) - .await; + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; let StreamingEditFileToolOutput::Success { new_text, diff, .. } = result.unwrap() else { panic!("expected success"); @@ -802,43 +1139,21 @@ mod tests { #[gpui::test] async fn test_streaming_edit_overwrite_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"file.txt": "old content"})) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old content"})).await; let result = cx .update(|cx| { - let input = 
StreamingEditFileToolInput { - display_description: "Overwrite file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Overwrite, - content: Some("new content".into()), - edits: None, - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Overwrite file".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("new content".into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -854,51 +1169,24 @@ mod tests { #[gpui::test] async fn test_streaming_edit_granular_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - 
project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -909,58 +1197,34 @@ mod tests { } #[gpui::test] - async fn test_streaming_edit_multiple_nonoverlapping_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit multiple lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - EditOperation { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - ]), - }; - 
Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit multiple lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -975,57 +1239,33 @@ mod tests { #[gpui::test] async fn test_streaming_edit_adjacent_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit adjacent lines".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 2".into(), - new_text: "modified line 2".into(), - }, - EditOperation { - old_text: "line 3".into(), - new_text: "modified 
line 3".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit adjacent lines".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }, + Edit { + old_text: "line 3".into(), + new_text: "modified line 3".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1040,57 +1280,33 @@ mod tests { #[gpui::test] async fn test_streaming_edit_ascending_order_edits(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit multiple lines in ascending order".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 1".into(), - new_text: "modified line 1".into(), - }, - 
EditOperation { - old_text: "line 5".into(), - new_text: "modified line 5".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit multiple lines in ascending order".into(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![ + Edit { + old_text: "line 1".into(), + new_text: "modified line 1".into(), + }, + Edit { + old_text: "line 5".into(), + new_text: "modified line 5".into(), + }, + ]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1105,45 +1321,23 @@ mod tests { #[gpui::test] async fn test_streaming_edit_nonexistent_file(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({})).await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Some edit".into(), - path: "root/nonexistent_file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "foo".into(), - new_text: "bar".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - 
language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Some edit".into(), + path: "root/nonexistent_file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "foo".into(), + new_text: "bar".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1155,46 +1349,24 @@ mod tests { #[gpui::test] async fn test_streaming_edit_failed_match(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree("/root", json!({"file.txt": "hello world"})) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; let result = cx .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Edit file".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![EditOperation { - old_text: "nonexistent text that is not in the file".into(), - new_text: "replacement".into(), - }]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit file".into(), + path: 
"root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "nonexistent text that is not in the file".into(), + new_text: "replacement".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1208,75 +1380,2408 @@ mod tests { } #[gpui::test] - async fn test_streaming_edit_overlapping_edits_out_of_order(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - // Multi-line file so the line-based fuzzy matcher can resolve each edit. - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); + async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send partials simulating LLM streaming: description first, then path, then mode + sender.send_partial(json!({"display_description": "Edit lines"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt" + })); + cx.run_until_parked(); + + // Path is NOT yet complete because mode hasn't appeared — no buffer open yet + 
sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Now send the final complete input + sender.send_final(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } - // Edit A spans lines 3-4, edit B spans lines 2-3. They overlap on - // "line 3" and are given in descending file order so the ascending - // sort must reorder them before the pairwise overlap check can - // detect them correctly. - let result = cx - .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Overlapping edits".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 3\nline 4".into(), - new_text: "SECOND".into(), - }, - EditOperation { - old_text: "line 2\nline 3".into(), - new_text: "FIRST".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - Templates::new(), - )) - .run(input, ToolCallEventStream::test().0, cx) - }) - .await; + #[gpui::test] + async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send partial with path but NO mode — path should NOT be treated as complete + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file" + })); + 
cx.run_until_parked(); + + // Now the path grows and mode appears + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Send final + sender.send_final(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new content" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new content"); + } + #[gpui::test] + async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver, mut cancellation_tx) = + ToolCallEventStream::test_with_cancellation(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send a partial + sender.send_partial(json!({"display_description": "Edit"})); + cx.run_until_parked(); + + // Cancel during streaming + ToolCallEventStream::signal_cancellation_with_sender(&mut cancellation_tx); + cx.run_until_parked(); + + // The sender is still alive so the partial loop should detect cancellation + // We need to drop the sender to also unblock recv() if the loop didn't catch it + drop(sender); + + let result = task.await; let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { panic!("expected error"); }; assert!( - error.contains("Conflicting edit ranges detected"), - "Expected 'Conflicting edit ranges detected' but got: {error}" + error.contains("cancelled"), + "Expected cancellation error but got: {error}" ); } + #[gpui::test] + async fn test_streaming_edit_with_multiple_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 
4\nline 5\n"}), + ) + .await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Simulate fine-grained streaming of the JSON + sender.send_partial(json!({"display_description": "Edit multiple"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 1"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "modified line 1"}, + {"old_text": "line 5"} + ] + })); + cx.run_until_parked(); + + // Send final complete input + sender.send_final(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "modified line 1"}, + {"old_text": "line 5", "new_text": "modified line 5"} + ] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!( + new_text, + "modified line 1\nline 2\nline 3\nline 4\nmodified line 5\n" + ); + } + + #[gpui::test] + async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Stream partials for create mode + sender.send_partial(json!({"display_description": "Create new file"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "Hello, " + })); + cx.run_until_parked(); + + // Final with full content + sender.send_final(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "Hello, World!" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "Hello, World!"); + } + + #[gpui::test] + async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send final immediately with no partials (simulates non-streaming path) + sender.send_final(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_incremental_edit_application(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), + ) + .await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Stream description, path, mode + sender.send_partial(json!({"display_description": "Edit multiple lines"})); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // First edit starts streaming (old_text only, still in progress) + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 1"}] + })); + cx.run_until_parked(); + + // Buffer should not 
have changed yet — the first edit is still in progress + // (no second edit has appeared to prove the first is complete) + let buffer_text = project.update(cx, |project, cx| { + let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); + project_path.and_then(|pp| { + project + .get_open_buffer(&pp, cx) + .map(|buffer| buffer.read(cx).text()) + }) + }); + // Buffer is open (from streaming) but edit 1 is still in-progress + assert_eq!( + buffer_text.as_deref(), + Some("line 1\nline 2\nline 3\nline 4\nline 5\n"), + "Buffer should not be modified while first edit is still in progress" + ); + + // Second edit appears — this proves the first edit is complete, so it + // should be applied immediately during streaming + sender.send_partial(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED 1"}, + {"old_text": "line 5"} + ] + })); + cx.run_until_parked(); + + // First edit should now be applied to the buffer + let buffer_text = project.update(cx, |project, cx| { + let project_path = project.find_project_path(&PathBuf::from("root/file.txt"), cx); + project_path.and_then(|pp| { + project + .get_open_buffer(&pp, cx) + .map(|buffer| buffer.read(cx).text()) + }) + }); + assert_eq!( + buffer_text.as_deref(), + Some("MODIFIED 1\nline 2\nline 3\nline 4\nline 5\n"), + "First edit should be applied during streaming when second edit appears" + ); + + // Send final complete input + sender.send_final(json!({ + "display_description": "Edit multiple lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED 1"}, + {"old_text": "line 5", "new_text": "MODIFIED 5"} + ] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { + new_text, old_text, .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "MODIFIED 1\nline 2\nline 3\nline 4\nMODIFIED 5\n"); + assert_eq!( + *old_text, "line 1\nline 2\nline 3\nline 4\nline 5\n", + "old_text should reflect the original file content before any edits" + ); + } + + #[gpui::test] + async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup: description + path + mode + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 in progress + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "aaa", "new_text": "AAA"}] + })); + cx.run_until_parked(); + + // Edit 2 appears — edit 1 is now complete and should be applied + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"} + ] + })); + cx.run_until_parked(); + + // Verify edit 1 fully applied. Edit 2's new_text is being + // streamed: "CCC" is inserted but the old "ccc" isn't deleted + // yet (StreamingDiff::finish runs when edit 3 marks edit 2 done). 
+ let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCCccc\nddd\neee\n")); + + // Edit 3 appears — edit 2 is now complete and should be applied + sender.send_partial(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"}, + {"old_text": "eee", "new_text": "EEE"} + ] + })); + cx.run_until_parked(); + + // Verify edits 1 and 2 fully applied. Edit 3's new_text is being + // streamed: "EEE" is inserted but old "eee" isn't deleted yet. + let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); + + // Send final + sender.send_final(json!({ + "display_description": "Edit three lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "aaa", "new_text": "AAA"}, + {"old_text": "ccc", "new_text": "CCC"}, + {"old_text": "eee", "new_text": "EEE"} + ] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "AAA\nbbb\nCCC\nddd\nEEE\n"); + } + + #[gpui::test] + async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 (valid) in progress — not yet complete (no second edit) + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"} + ] + })); + cx.run_until_parked(); + + // Edit 2 appears (will fail to match) — this makes edit 1 complete. + // Edit 1 should be applied. Edit 2 is still in-progress (last edit). + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"}, + {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"} + ] + })); + cx.run_until_parked(); + + // Verify edit 1 was applied + let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!( + buffer_text.as_deref(), + Some("MODIFIED\nline 2\nline 3\n"), + "First edit should be applied even though second edit will fail" + ); + + // Edit 3 appears — this makes edit 2 "complete", triggering its + // resolution which should fail (old_text doesn't exist in the file). 
+ sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "line 1", "new_text": "MODIFIED"}, + {"old_text": "nonexistent text that does not appear anywhere in the file at all", "new_text": "whatever"}, + {"old_text": "line 3", "new_text": "MODIFIED 3"} + ] + })); + cx.run_until_parked(); + + // The error from edit 2 should have propagated out of the partial loop. + // Drop sender to unblock recv() if the loop didn't catch it. + drop(sender); + + let result = task.await; + let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + panic!("expected error"); + }; + assert!( + error.contains("Could not find matching text for edit at index 1"), + "Expected error about edit 1 failing, got: {error}" + ); + } + + #[gpui::test] + async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup + single edit that stays in-progress (no second edit to prove completion) + sender.send_partial(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] + })); + cx.run_until_parked(); + + // The edit's old_text and new_text both arrived in one partial, so + // the old_text is resolved and new_text is being streamed via + // StreamingDiff. The buffer reflects the in-progress diff (new text + // inserted, old text not yet fully removed until finalization). 
+ let buffer_text = project.update(cx, |project, cx| { + let pp = project + .find_project_path(&PathBuf::from("root/file.txt"), cx) + .unwrap(); + project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + }); + assert_eq!( + buffer_text.as_deref(), + Some("goodbye worldhello world\n"), + "In-progress streaming diff: new text inserted, old text not yet removed" + ); + + // Send final — the edit is applied during finalization + sender.send_final(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello world", "new_text": "goodbye world"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "goodbye world\n"); + } + + #[gpui::test] + async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + let (sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send progressively more complete partial snapshots, as the LLM would + sender.send_partial(json!({ + "display_description": "Edit lines" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 2"}] + })); + cx.run_until_parked(); + + // Send the final complete input + sender.send_final(json!({ + "display_description": "Edit lines", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "line 2", "new_text": "modified line 
2"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nmodified line 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello world\n"})).await; + let (sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Send a partial then drop the sender without sending final + sender.send_partial(json!({ + "display_description": "Edit file" + })); + cx.run_until_parked(); + + drop(sender); + + let result = task.await; + assert!( + result.is_err(), + "Tool should error when sender is dropped without sending final input" + ); + } + + #[gpui::test] + async fn test_streaming_input_recv_drains_partials(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + // Create a channel and send multiple partials before a final, then use + // ToolInput::resolved-style immediate delivery to confirm recv() works + // when partials are already buffered. 
+ let (sender, input): (ToolInputSender, ToolInput) = + ToolInput::test(); + let (event_stream, _event_rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Buffer several partials before sending the final + sender.send_partial(json!({"display_description": "Create"})); + sender.send_partial(json!({"display_description": "Create", "path": "root/dir/new.txt"})); + sender.send_partial(json!({ + "display_description": "Create", + "path": "root/dir/new.txt", + "mode": "write" + })); + sender.send_final(json!({ + "display_description": "Create", + "path": "root/dir/new.txt", + "mode": "write", + "content": "streamed content" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "streamed content"); + } + + #[gpui::test] + async fn test_streaming_resolve_path_for_creating_file(cx: &mut TestAppContext) { + let mode = StreamingEditFileMode::Write; + + let result = test_resolve_path(&mode, "root/new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("new.txt")); + + let result = test_resolve_path(&mode, "new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("new.txt")); + + let result = test_resolve_path(&mode, "dir/new.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir/existing.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/subdir/existing.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't write to file: path is a directory" + ); + + let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't create file: parent directory doesn't exist" + ); + } + + #[gpui::test] + async fn 
test_streaming_resolve_path_for_editing_file(cx: &mut TestAppContext) { + let mode = StreamingEditFileMode::Edit; + + let path_with_root = "root/dir/subdir/existing.txt"; + let path_without_root = "dir/subdir/existing.txt"; + let result = test_resolve_path(&mode, path_with_root, cx); + assert_resolved_path_eq(result.await, rel_path(path_without_root)); + + let result = test_resolve_path(&mode, path_without_root, cx); + assert_resolved_path_eq(result.await, rel_path(path_without_root)); + + let result = test_resolve_path(&mode, "root/nonexistent.txt", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't edit file: path not found" + ); + + let result = test_resolve_path(&mode, "root/dir", cx); + assert_eq!( + result.await.unwrap_err().to_string(), + "Can't edit file: path is a directory" + ); + } + + async fn test_resolve_path( + mode: &StreamingEditFileMode, + path: &str, + cx: &mut TestAppContext, + ) -> anyhow::Result { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "dir": { + "subdir": { + "existing.txt": "hello" + } + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + cx.update(|cx| resolve_path(*mode, &PathBuf::from(path), &project, cx)) + } + + #[track_caller] + fn assert_resolved_path_eq(path: anyhow::Result, expected: &RelPath) { + let actual = path.expect("Should return valid path").path; + assert_eq!(actual.as_ref(), expected); + } + + #[gpui::test] + async fn test_streaming_format_on_save(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({"src": {}})).await; + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + + let rust_language = Arc::new(language::Language::new( + language::LanguageConfig { + name: "Rust".into(), + matcher: language::LanguageMatcher { + path_suffixes: 
vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + )); + + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_language); + + let mut fake_language_servers = language_registry.register_fake_lsp( + "Rust", + language::FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + document_formatting_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + ..Default::default() + }, + ); + + fs.save( + path!("/root/src/main.rs").as_ref(), + &"initial content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + // Open the buffer to trigger LSP initialization + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + // Register the buffer with language servers + let _handle = project.update(cx, |project, cx| { + project.register_buffer_with_language_servers(&buffer, cx) + }); + + const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\ +"; + const FORMATTED_CONTENT: &str = "This file was formatted by the fake formatter in the test.\ +"; + + // Get the fake language server and set up formatting handler + let fake_language_server = fake_language_servers.next().await.unwrap(); + fake_language_server.set_request_handler::({ + |_, _| async move { + Ok(Some(vec![lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(1, 0)), + new_text: FORMATTED_CONTENT.to_string(), + }])) + } + }); + + // Test with format_on_save enabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On); + settings.project.all_languages.defaults.formatter = + Some(language::language_settings::FormatterList::default()); + }); + }); + }); + + // Use streaming pattern so executor can pump the LSP 
request/response + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Create main function", + "path": "root/src/main.rs", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_final(json!({ + "display_description": "Create main function", + "path": "root/src/main.rs", + "mode": "write", + "content": UNFORMATTED_CONTENT + })); + + let result = task.await; + assert!(result.is_ok()); + + cx.executor().run_until_parked(); + + let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + new_content.replace("\r\n", "\n"), + FORMATTED_CONTENT, + "Code should be formatted when format_on_save is enabled" + ); + + let stale_buffer_count = thread + .read_with(cx, |thread, _cx| thread.action_log.clone()) + .read_with(cx, |log, cx| log.stale_buffers(cx).count()); + + assert_eq!( + stale_buffer_count, 0, + "BUG: Buffer is incorrectly marked as stale after format-on-save. 
Found {} stale buffers.", + stale_buffer_count + ); + + // Test with format_on_save disabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.format_on_save = + Some(FormatOnSave::Off); + }); + }); + }); + + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + + let tool2 = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + + let task = cx.update(|cx| tool2.run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Update main function", + "path": "root/src/main.rs", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_final(json!({ + "display_description": "Update main function", + "path": "root/src/main.rs", + "mode": "write", + "content": UNFORMATTED_CONTENT + })); + + let result = task.await; + assert!(result.is_ok()); + + cx.executor().run_until_parked(); + + let new_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + new_content.replace("\r\n", "\n"), + UNFORMATTED_CONTENT, + "Code should not be formatted when format_on_save is disabled" + ); + } + + #[gpui::test] + async fn test_streaming_remove_trailing_whitespace(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({"src": {}})).await; + fs.save( + path!("/root/src/main.rs").as_ref(), + &"initial content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + let (tool, project, action_log, fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); + + // Test with remove_trailing_whitespace_on_save enabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, 
|settings| { + settings + .project + .all_languages + .defaults + .remove_trailing_whitespace_on_save = Some(true); + }); + }); + }); + + const CONTENT_WITH_TRAILING_WHITESPACE: &str = + "fn main() { \n println!(\"Hello!\"); \n}\n"; + + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create main function".into(), + path: "root/src/main.rs".into(), + mode: StreamingEditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + assert!(result.is_ok()); + + cx.executor().run_until_parked(); + + assert_eq!( + fs.load(path!("/root/src/main.rs").as_ref()) + .await + .unwrap() + .replace("\r\n", "\n"), + "fn main() {\n println!(\"Hello!\");\n}\n", + "Trailing whitespace should be removed when remove_trailing_whitespace_on_save is enabled" + ); + + // Test with remove_trailing_whitespace_on_save disabled + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings + .project + .all_languages + .defaults + .remove_trailing_whitespace_on_save = Some(false); + }); + }); + }); + + let tool2 = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + + let result = cx + .update(|cx| { + tool2.run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Update main function".into(), + path: "root/src/main.rs".into(), + mode: StreamingEditFileMode::Write, + content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), + edits: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + assert!(result.is_ok()); + + cx.executor().run_until_parked(); + + let final_content = fs.load(path!("/root/src/main.rs").as_ref()).await.unwrap(); + assert_eq!( + final_content.replace("\r\n", "\n"), + CONTENT_WITH_TRAILING_WHITESPACE, + "Trailing whitespace should 
remain when remove_trailing_whitespace_on_save is disabled" + ); + } + + #[gpui::test] + async fn test_streaming_authorize(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await; + + // Test 1: Path with .zed component should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &PathBuf::from(".zed/settings.json"), + "test 1", + &stream_tx, + cx, + ) + }); + + let event = stream_rx.expect_authorization().await; + assert_eq!( + event.tool_call.fields.title, + Some("test 1 (local settings)".into()) + ); + + // Test 2: Path outside project should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = + cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 2", &stream_tx, cx)); + + let event = stream_rx.expect_authorization().await; + assert_eq!(event.tool_call.fields.title, Some("test 2".into())); + + // Test 3: Relative path without .zed should not require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize(&PathBuf::from("root/src/main.rs"), "test 3", &stream_tx, cx) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + + // Test 4: Path with .zed in the middle should require confirmation + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &PathBuf::from("root/.zed/tasks.json"), + "test 4", + &stream_tx, + cx, + ) + }); + let event = stream_rx.expect_authorization().await; + assert_eq!( + event.tool_call.fields.title, + Some("test 4 (local settings)".into()) + ); + + // Test 5: When global default is allow, sensitive and outside-project + // paths still require confirmation + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = 
settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + // 5.1: .zed/settings.json is a sensitive path — still prompts + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &PathBuf::from(".zed/settings.json"), + "test 5.1", + &stream_tx, + cx, + ) + }); + let event = stream_rx.expect_authorization().await; + assert_eq!( + event.tool_call.fields.title, + Some("test 5.1 (local settings)".into()) + ); + + // 5.2: /etc/hosts is outside the project, but Allow auto-approves + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + + // 5.3: Normal in-project path with allow — no confirmation needed + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &PathBuf::from("root/src/main.rs"), + "test 5.3", + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + + // 5.4: With Confirm default, non-project paths still prompt + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Confirm; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx + .update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.4", &stream_tx, cx)); + + let event = stream_rx.expect_authorization().await; + assert_eq!(event.tool_call.fields.title, Some("test 5.4".into())); + } + + #[gpui::test] + async fn test_streaming_authorize_create_under_symlink_with_allow(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({})).await; + fs.insert_tree("/outside", 
json!({})).await; + fs.insert_symlink("/root/link", PathBuf::from("/outside")) + .await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let authorize_task = cx.update(|cx| { + tool.authorize( + &PathBuf::from("link/new.txt"), + "create through symlink", + &stream_tx, + cx, + ) + }); + + let event = stream_rx.expect_authorization().await; + assert!( + event + .tool_call + .fields + .title + .as_deref() + .is_some_and(|title| title.contains("points outside the project")), + "Expected symlink escape authorization for create under external symlink" + ); + + event + .response + .send(acp::PermissionOptionId::new("allow")) + .unwrap(); + authorize_task.await.unwrap(); + } + + #[gpui::test] + async fn test_streaming_edit_file_symlink_escape_requests_authorization( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + fs.insert_tree( + path!("/outside"), + json!({ + "config.txt": "old content" + }), + ) + .await; + fs.create_symlink( + path!("/root/link_to_external").as_ref(), + PathBuf::from("/outside"), + ) + .await + .unwrap(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _authorize_task = cx.update(|cx| { + tool.authorize( + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", + &stream_tx, + cx, + ) + }); + + let auth = stream_rx.expect_authorization().await; + let title = 
auth.tool_call.fields.title.as_deref().unwrap_or(""); + assert!( + title.contains("points outside the project"), + "title should mention symlink escape, got: {title}" + ); + } + + #[gpui::test] + async fn test_streaming_edit_file_symlink_escape_denied(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + fs.insert_tree( + path!("/outside"), + json!({ + "config.txt": "old content" + }), + ) + .await; + fs.create_symlink( + path!("/root/link_to_external").as_ref(), + PathBuf::from("/outside"), + ) + .await + .unwrap(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let authorize_task = cx.update(|cx| { + tool.authorize( + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", + &stream_tx, + cx, + ) + }); + + let auth = stream_rx.expect_authorization().await; + drop(auth); // deny by dropping + + let result = authorize_task.await; + assert!(result.is_err(), "should fail when denied"); + } + + #[gpui::test] + async fn test_streaming_edit_file_symlink_escape_honors_deny_policy(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.tools.insert( + "edit_file".into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + ..Default::default() + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "src": { "main.rs": "fn main() {}" } + }), + ) + .await; + fs.insert_tree( + path!("/outside"), + json!({ + "config.txt": "old content" + }), + ) + .await; + fs.create_symlink( + path!("/root/link_to_external").as_ref(), + 
PathBuf::from("/outside"), + ) + .await + .unwrap(); + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await; + + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let result = cx + .update(|cx| { + tool.authorize( + &PathBuf::from("link_to_external/config.txt"), + "edit through symlink", + &stream_tx, + cx, + ) + }) + .await; + + assert!(result.is_err(), "Tool should fail when policy denies"); + assert!( + !matches!( + stream_rx.try_next(), + Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + ), + "Deny policy should not emit symlink authorization prompt", + ); + } + + #[gpui::test] + async fn test_streaming_authorize_global_config(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({})).await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; + + let test_cases = vec![ + ( + "/etc/hosts", + true, + "System file should require confirmation", + ), + ( + "/usr/local/bin/script", + true, + "System bin file should require confirmation", + ), + ( + "project/normal_file.rs", + false, + "Normal project file should not require confirmation", + ), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + auth.await.unwrap(); + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + } + } + } + + #[gpui::test] + async fn test_streaming_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/workspace/frontend", + json!({ 
+ "src": { + "main.js": "console.log('frontend');" + } + }), + ) + .await; + fs.insert_tree( + "/workspace/backend", + json!({ + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.insert_tree( + "/workspace/shared", + json!({ + ".zed": { + "settings.json": "{}" + } + }), + ) + .await; + let (tool, _project, _action_log, _fs, _thread) = setup_test_with_fs( + cx, + fs, + &[ + path!("/workspace/frontend").as_ref(), + path!("/workspace/backend").as_ref(), + path!("/workspace/shared").as_ref(), + ], + ) + .await; + + let test_cases = vec![ + ("frontend/src/main.js", false, "File in first worktree"), + ("backend/src/main.rs", false, "File in second worktree"), + ( + "shared/.zed/settings.json", + true, + ".zed file in third worktree", + ), + ("/etc/hosts", true, "Absolute path outside all worktrees"), + ( + "../outside/file.txt", + true, + "Relative path outside worktrees", + ), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + auth.await.unwrap(); + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + } + } + } + + #[gpui::test] + async fn test_streaming_needs_confirmation_edge_cases(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".zed": { + "settings.json": "{}" + }, + "src": { + ".zed": { + "local.json": "{}" + } + } + }), + ) + .await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; + + let test_cases = vec![ + ("", false, "Empty path is treated as project root"), + ("/", true, "Root directory should be outside project"), + ( + "project/../other", + 
true, + "Path with .. that goes outside of root directory", + ), + ( + "project/./src/file.rs", + false, + "Path with . should work normally", + ), + #[cfg(target_os = "windows")] + ("C:\\Windows\\System32\\hosts", true, "Windows system path"), + #[cfg(target_os = "windows")] + ("project\\src\\main.rs", false, "Windows-style project path"), + ]; + + for (path, should_confirm, description) in test_cases { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let auth = + cx.update(|cx| tool.authorize(&PathBuf::from(path), "Edit file", &stream_tx, cx)); + + cx.run_until_parked(); + + if should_confirm { + stream_rx.expect_authorization().await; + } else { + assert!( + stream_rx.try_next().is_err(), + "Failed for case: {} - path: {} - expected no confirmation but got one", + description, + path + ); + auth.await.unwrap(); + } + } + } + + #[gpui::test] + async fn test_streaming_needs_confirmation_with_different_modes(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + "existing.txt": "content", + ".zed": { + "settings.json": "{}" + } + }), + ) + .await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; + + let modes = vec![StreamingEditFileMode::Edit, StreamingEditFileMode::Write]; + + for _mode in modes { + // Test .zed path with different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &PathBuf::from("project/.zed/settings.json"), + "Edit settings", + &stream_tx, + cx, + ) + }); + + stream_rx.expect_authorization().await; + + // Test outside path with different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let _auth = cx.update(|cx| { + tool.authorize( + &PathBuf::from("/outside/file.txt"), + "Edit file", + &stream_tx, + cx, + ) + }); + + stream_rx.expect_authorization().await; + + // Test normal path with 
different modes + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + cx.update(|cx| { + tool.authorize( + &PathBuf::from("project/normal.txt"), + "Edit file", + &stream_tx, + cx, + ) + }) + .await + .unwrap(); + assert!(stream_rx.try_next().is_err()); + } + } + + #[gpui::test] + async fn test_streaming_initial_title_with_partial_input(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({})).await; + let (tool, _project, _action_log, _fs, _thread) = + setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await; + + cx.update(|cx| { + assert_eq!( + tool.initial_title( + Err(json!({ + "path": "src/main.rs", + "display_description": "", + })), + cx + ), + "src/main.rs" + ); + assert_eq!( + tool.initial_title( + Err(json!({ + "path": "", + "display_description": "Fix error handling", + })), + cx + ), + "Fix error handling" + ); + assert_eq!( + tool.initial_title( + Err(json!({ + "path": "src/main.rs", + "display_description": "Fix error handling", + })), + cx + ), + "src/main.rs" + ); + assert_eq!( + tool.initial_title( + Err(json!({ + "path": "", + "display_description": "", + })), + cx + ), + DEFAULT_UI_TEXT + ); + assert_eq!( + tool.initial_title(Err(serde_json::Value::Null), cx), + DEFAULT_UI_TEXT + ); + }); + } + + #[gpui::test] + async fn test_streaming_diff_finalization(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({"main.rs": ""})).await; + let (tool, project, action_log, _fs, thread) = + setup_test_with_fs(cx, fs, &[path!("/").as_ref()]).await; + let language_registry = project.read_with(cx, |p, _cx| p.languages().clone()); + + // Ensure the diff is finalized after the edit completes. 
+ { + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let edit = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit file".into(), + path: path!("/main.rs").into(), + mode: StreamingEditFileMode::Write, + content: Some("new content".into()), + edits: None, + }), + stream_tx, + cx, + ) + }); + stream_rx.expect_update_fields().await; + let diff = stream_rx.expect_diff().await; + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); + cx.run_until_parked(); + edit.await.unwrap(); + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + + // Ensure the diff is finalized if the tool call gets dropped. + { + let tool = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + action_log, + language_registry, + )); + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let edit = cx.update(|cx| { + tool.run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit file".into(), + path: path!("/main.rs").into(), + mode: StreamingEditFileMode::Write, + content: Some("dropped content".into()), + edits: None, + }), + stream_tx, + cx, + ) + }); + stream_rx.expect_update_fields().await; + let diff = stream_rx.expect_diff().await; + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); + drop(edit); + cx.run_until_parked(); + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + } + + #[gpui::test] + async fn test_streaming_consecutive_edits_work(cx: &mut TestAppContext) { + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( + project.clone(), + action_log.clone(), + true, + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + ToolInput::resolved(crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + 
start_line: None, + end_line: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // First edit should work + let edit_result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "First edit".into(), + path: "root/test.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "modified content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + assert!( + edit_result.is_ok(), + "First edit should succeed, got error: {:?}", + edit_result.as_ref().err() + ); + + // Second edit should also work because the edit updated the recorded read time + let edit_result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Second edit".into(), + path: "root/test.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "modified content".into(), + new_text: "further modified content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + assert!( + edit_result.is_ok(), + "Second consecutive edit should succeed, got error: {:?}", + edit_result.as_ref().err() + ); + } + + #[gpui::test] + async fn test_streaming_external_modification_detected(cx: &mut TestAppContext) { + let (tool, project, action_log, fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( + project.clone(), + action_log.clone(), + true, + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + ToolInput::resolved(crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Simulate external modification + cx.background_executor + 
.advance_clock(std::time::Duration::from_secs(2)); + fs.save( + path!("/root/test.txt").as_ref(), + &"externally modified content".into(), + language::LineEnding::Unix, + ) + .await + .unwrap(); + + // Reload the buffer to pick up the new mtime + let project_path = project + .read_with(cx, |project, cx| { + project.find_project_path("root/test.txt", cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + buffer + .update(cx, |buffer, cx| buffer.reload(cx)) + .await + .unwrap(); + + cx.executor().run_until_parked(); + + // Try to edit - should fail because file was modified externally + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit after external change".into(), + path: "root/test.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "externally modified content".into(), + new_text: "new content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + panic!("expected error"); + }; + assert!( + error.contains("has been modified since you last read it"), + "Error should mention file modification, got: {}", + error + ); + } + + #[gpui::test] + async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) { + let (tool, project, action_log, _fs, _thread) = + setup_test(cx, json!({"test.txt": "original content"})).await; + let read_tool = Arc::new(crate::ReadFileTool::new( + project.clone(), + action_log.clone(), + true, + )); + + // Read the file first + cx.update(|cx| { + read_tool.clone().run( + ToolInput::resolved(crate::ReadFileToolInput { + path: "root/test.txt".to_string(), + start_line: None, + end_line: None, + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await + .unwrap(); + + // Open the buffer 
and make it dirty + let project_path = project + .read_with(cx, |project, cx| { + project.find_project_path("root/test.txt", cx) + }) + .expect("Should find project path"); + let buffer = project + .update(cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + let end_point = buffer.max_point(); + buffer.edit([(end_point..end_point, " added text")], None, cx); + }); + + let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty()); + assert!(is_dirty, "Buffer should be dirty after in-memory edit"); + + // Try to edit - should fail because buffer has unsaved changes + let result = cx + .update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit with dirty buffer".into(), + path: "root/test.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "original content".into(), + new_text: "new content".into(), + }]), + }), + ToolCallEventStream::test().0, + cx, + ) + }) + .await; + + let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + panic!("expected error"); + }; + assert!( + error.contains("This file has unsaved changes."), + "Error should mention unsaved changes, got: {}", + error + ); + assert!( + error.contains("keep or discard"), + "Error should ask whether to keep or discard changes, got: {}", + error + ); + assert!( + error.contains("save or revert the file manually"), + "Error should ask user to manually save or revert when tools aren't available, got: {}", + error + ); + } + + #[gpui::test] + async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) { + // Edit 1's replacement introduces text that contains edit 2's + // old_text as a substring. Because edits resolve sequentially + // against the current buffer, edit 2 finds a unique match in + // the modified buffer and succeeds. 
+ let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Setup: resolve the buffer + sender.send_partial(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Edit 1 replaces "bbb\nccc" with "XXX\nccc\nddd", so the + // buffer becomes "aaa\nXXX\nccc\nddd\nddd\neee\n". + // Edit 2's old_text "ccc\nddd" matches the first occurrence + // in the modified buffer and replaces it with "ZZZ". + // Edit 3 exists only to mark edit 2 as "complete" during streaming. + sender.send_partial(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, + {"old_text": "ccc\nddd", "new_text": "ZZZ"}, + {"old_text": "eee", "new_text": "DUMMY"} + ] + })); + cx.run_until_parked(); + + // Send the final input with all three edits. + sender.send_final(json!({ + "display_description": "Overlapping edits", + "path": "root/file.txt", + "mode": "edit", + "edits": [ + {"old_text": "bbb\nccc", "new_text": "XXX\nccc\nddd"}, + {"old_text": "ccc\nddd", "new_text": "ZZZ"}, + {"old_text": "eee", "new_text": "DUMMY"} + ] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "aaa\nXXX\nZZZ\nddd\nDUMMY\n"); + } + + #[gpui::test] + async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Stream content incrementally + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\n" + })); + cx.run_until_parked(); + + // Verify buffer has partial content + let buffer = project.update(cx, |project, cx| { + let path = project + .find_project_path("root/dir/new_file.txt", cx) + .unwrap(); + project.get_open_buffer(&path, cx).unwrap() + }); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\n"); + + // Stream more content + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "line 1\nline 2\n"); + + // Stream final chunk + sender.send_partial(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "line 1\nline 2\nline 3\n" + ); + + // Send final input + sender.send_final(json!({ + "display_description": "Create new file", + "path": "root/dir/new_file.txt", + "mode": "write", + "content": "line 1\nline 2\nline 3\n" + })); 
+ + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "line 1\nline 2\nline 3\n"); + } + + #[gpui::test] + async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (sender, input) = ToolInput::::test(); + let (event_stream, mut receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Get the diff entity from the event stream + receiver.expect_update_fields().await; + let diff = receiver.expect_diff().await; + + // Diff starts pending with no revealed ranges + diff.read_with(cx, |diff, cx| { + assert!(matches!(diff, Diff::Pending(_))); + assert!(!diff.has_revealed_range(cx)); + }); + + // Stream first content chunk + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + + // Diff should now have revealed ranges showing the new content + diff.read_with(cx, |diff, cx| { + assert!(diff.has_revealed_range(cx)); + }); + + // Send final input + sender.send_final(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { + new_text, old_text, .. 
+ } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + + // Diff is finalized after completion + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + + #[gpui::test] + async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) { + let (tool, project, _action_log, _fs, _thread) = setup_test( + cx, + json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), + ) + .await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write" + })); + cx.run_until_parked(); + + // Verify buffer still has old content (no content partial yet) + let buffer = project.update(cx, |project, cx| { + let path = project.find_project_path("root/file.txt", cx).unwrap(); + project.get_open_buffer(&path, cx).unwrap() + }); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "old line 1\nold line 2\nold line 3\n" + ); + + // First content partial replaces old content + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\n" + })); + cx.run_until_parked(); + assert_eq!(buffer.read_with(cx, |b, _| b.text()), "new line 1\n"); + + // Subsequent content partials append + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\n" + })); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |b, _| b.text()), + "new line 1\nnew line 2\n" + ); + + // Send final input with complete content + sender.send_final(json!({ + "display_description": "Overwrite file", + 
"path": "root/file.txt", + "mode": "write", + "content": "new line 1\nnew line 2\nnew line 3\n" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { + new_text, old_text, .. + } = result.unwrap() + else { + panic!("expected success"); + }; + assert_eq!(new_text, "new line 1\nnew line 2\nnew line 3\n"); + assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); + } + + #[gpui::test] + async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Simulate JSON fixer producing a literal backslash when the LLM + // stream cuts in the middle of a \n escape sequence. + // The old_text "hello\nworld" would be streamed as: + // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) + // partial 2: old_text = "hello\nworld" (fixer corrected the escape) + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\\"}] + })); + cx.run_until_parked(); + + // Now the fixer corrects it to the real newline. + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld"}] + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); + } + + // Verifies that after streaming_edit_file_tool edits a file, the action log + // reports changed buffers so that the Accept All / Reject All review UI appears. + #[gpui::test] + async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) { + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit lines".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "edit should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming edit, + but no changed buffers were found \u{2014} Accept All / Reject All will not appear" + ); + } + + // Same test but for Write mode (overwrite entire file). 
+ #[gpui::test] + async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers( + cx: &mut TestAppContext, + ) { + let (tool, _project, action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "original content"})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Overwrite file".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("completely new content".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "write should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming write, \ + but no changed buffers were found \u{2014} Accept All / Reject All will not appear" + ); + } + + async fn setup_test_with_fs( + cx: &mut TestAppContext, + fs: Arc, + worktree_paths: &[&std::path::Path], + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + let project = Project::test(fs.clone(), worktree_paths.iter().copied(), cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + crate::Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + let 
action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + let tool = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + action_log.clone(), + language_registry, + )); + (tool, project, action_log, fs, thread) + } + + async fn setup_test( + cx: &mut TestAppContext, + initial_tree: serde_json::Value, + ) -> ( + Arc, + Entity, + Entity, + Arc, + Entity, + ) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/root", initial_tree).await; + setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 57b3278da256c01408f704a8e2f6f7e075057597..6396bd1b0e63b46a0207dd7df9b9f2fcd00176b7 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -15,7 +15,7 @@ use std::{ }; use crate::{ - AgentTool, ThreadEnvironment, ToolCallEventStream, ToolPermissionDecision, + AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_from_settings, }; @@ -85,34 +85,45 @@ impl AgentTool for TerminalTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let working_dir = match working_dir(&input, &self.project, cx) { - Ok(dir) => dir, - Err(err) => return Task::ready(Err(err.to_string())), - }; + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_from_settings( - Self::NAME, - std::slice::from_ref(&input.command), - settings, - ); + let (working_dir, authorize) = cx.update(|cx| { + let working_dir = + working_dir(&input, &self.project, cx).map_err(|err| err.to_string())?; - let 
authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.command.clone()]); - Some(event_stream.authorize(self.initial_title(Ok(input.clone()), cx), context, cx)) - } - }; - cx.spawn(async move |cx| { + let decision = decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.command), + AgentSettings::get_global(cx), + ); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => { + let context = crate::ToolPermissionContext::new( + Self::NAME, + vec![input.command.clone()], + ); + Some(event_stream.authorize( + self.initial_title(Ok(input.clone()), cx), + context, + cx, + )) + } + }; + Ok((working_dir, authorize)) + })?; if let Some(authorize) = authorize { authorize.await.map_err(|e| e.to_string())?; } diff --git a/crates/agent/src/tools/tool_edit_parser.rs b/crates/agent/src/tools/tool_edit_parser.rs new file mode 100644 index 0000000000000000000000000000000000000000..86259db916f49c07bbecc63625a93a9ebb955539 --- /dev/null +++ b/crates/agent/src/tools/tool_edit_parser.rs @@ -0,0 +1,941 @@ +use smallvec::SmallVec; + +use crate::{Edit, PartialEdit}; + +/// Events emitted by `ToolEditParser` as tool call input streams in. +#[derive(Debug, PartialEq, Eq)] +pub enum ToolEditEvent { + /// A chunk of `old_text` for an edit operation. + OldTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of `new_text` for an edit operation. + NewTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of content for write/overwrite mode. + ContentChunk { chunk: String }, +} + +/// Tracks the streaming state of a single edit to detect deltas. 
+#[derive(Default, Debug)] +struct EditStreamState { + old_text_emitted_len: usize, + old_text_done: bool, + new_text_emitted_len: usize, + new_text_done: bool, +} + +/// Converts incrementally-growing tool call JSON into a stream of chunk events. +/// +/// The tool call streaming infrastructure delivers partial JSON objects where +/// string fields grow over time. This parser compares consecutive partials, +/// computes the deltas, and emits `ToolEditEvent`s that downstream pipeline +/// stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for new_text) +/// can consume incrementally. +/// +/// Because partial JSON comes through a fixer (`partial-json-fixer`) that +/// closes incomplete escape sequences, a string can temporarily contain wrong +/// trailing characters (e.g. a literal `\` instead of `\n`). We handle this +/// by holding back trailing backslash characters in non-finalized chunks: if +/// a partial string ends with `\` (0x5C), that byte is not emitted until the +/// next partial confirms or corrects it. This avoids feeding corrupted bytes +/// to downstream consumers. +#[derive(Default, Debug)] +pub struct ToolEditParser { + edit_states: Vec, + content_emitted_len: usize, +} + +impl ToolEditParser { + /// Push a new set of partial edits (from edit mode) and return any events. + /// + /// Each call should pass the *entire current* edits array as seen in the + /// latest partial input. The parser will diff it against its internal state + /// to produce only the new events. + pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, partial) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // A new edit appeared — finalize the previous one if there was one. 
+ if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + // Process old_text changes. + if let Some(old_text) = &partial.old_text + && !state.old_text_done + { + if partial.new_text.is_some() { + // new_text appeared, so old_text is done — emit everything. + let start = state.old_text_emitted_len.min(old_text.len()); + let chunk = old_text[start..].to_string(); + state.old_text_done = true; + state.old_text_emitted_len = old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } else { + let safe_end = safe_emit_end(old_text); + if safe_end > state.old_text_emitted_len { + let chunk = old_text[state.old_text_emitted_len..safe_end].to_string(); + state.old_text_emitted_len = safe_end; + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + // Process new_text changes. + if let Some(new_text) = &partial.new_text + && !state.new_text_done + { + let safe_end = safe_emit_end(new_text); + if safe_end > state.new_text_emitted_len { + let chunk = new_text[state.new_text_emitted_len..safe_end].to_string(); + state.new_text_emitted_len = safe_end; + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + events + } + + /// Push new content and return any events. + /// + /// Each call should pass the *entire current* content string. The parser + /// will diff it against its internal state to emit only the new chunk. 
+ pub fn push_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let safe_end = safe_emit_end(content); + if safe_end > self.content_emitted_len { + let chunk = content[self.content_emitted_len..safe_end].to_string(); + self.content_emitted_len = safe_end; + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// Finalize all edits with the complete input. This emits `done: true` + /// events for any in-progress old_text or new_text that hasn't been + /// finalized yet. + /// + /// `final_edits` should be the fully deserialized final edits array. The + /// parser compares against its tracked state and emits any remaining deltas + /// with `done: true`. + pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, edit) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // This edit was never seen in partials — emit it fully. + if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + if !state.old_text_done { + let start = state.old_text_emitted_len.min(edit.old_text.len()); + let chunk = edit.old_text[start..].to_string(); + state.old_text_done = true; + state.old_text_emitted_len = edit.old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + + if !state.new_text_done { + let start = state.new_text_emitted_len.min(edit.new_text.len()); + let chunk = edit.new_text[start..].to_string(); + state.new_text_done = true; + state.new_text_emitted_len = edit.new_text.len(); + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + } + + events + } + + /// Finalize content with the complete input. 
+ pub fn finalize_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let start = self.content_emitted_len.min(content.len()); + if content.len() > start { + let chunk = content[start..].to_string(); + self.content_emitted_len = content.len(); + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// When a new edit appears at `index`, finalize the edit at `index - 1` + /// by emitting a `NewTextChunk { done: true }` if it hasn't been finalized. + fn finalize_previous_edit(&mut self, new_index: usize) -> Option> { + if new_index == 0 || self.edit_states.is_empty() { + return None; + } + + let previous_index = new_index - 1; + if previous_index >= self.edit_states.len() { + return None; + } + + let state = &mut self.edit_states[previous_index]; + let mut events = SmallVec::new(); + + // If old_text was never finalized, finalize it now with an empty done chunk. + if !state.old_text_done { + state.old_text_done = true; + events.push(ToolEditEvent::OldTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + // Emit a done event for new_text if not already finalized. + if !state.new_text_done { + state.new_text_done = true; + events.push(ToolEditEvent::NewTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + Some(events) + } +} + +/// Returns the byte position up to which it is safe to emit from a partial +/// string. If the string ends with a backslash (`\`, 0x5C), that byte is +/// held back because it may be an artifact of the partial JSON fixer closing +/// an incomplete escape sequence (e.g. turning a half-received `\n` into `\\`). +/// The next partial will reveal the correct character. 
+fn safe_emit_end(text: &str) -> usize { + if text.as_bytes().last() == Some(&b'\\') { + text.len() - 1 + } else { + text.len() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_single_edit_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + // old_text arrives in chunks: "hell" → "hello w" → "hello world" + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hell".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hell".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello w".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "o w".into(), + done: false, + }] + ); + + // new_text appears → old_text finalizes + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("good".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "orld".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "good".into(), + done: false, + }, + ] + ); + + // new_text grows + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("goodbye world".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "bye world".into(), + done: false, + }] + ); + + // Finalize + let events = parser.finalize_edits(&[Edit { + old_text: "hello world".into(), + new_text: "goodbye world".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }] + ); + } + + #[test] + fn test_multiple_edits_sequential() { + let mut parser = ToolEditParser::default(); + + // First edit streams in + let events = parser.push_edits(&[PartialEdit 
{ + old_text: Some("first old".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: false, + }, + ] + ); + + // Second edit appears → first edit's new_text is finalized + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }, + PartialEdit { + old_text: Some("second".into()), + new_text: None, + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second".into(), + done: false, + }, + ] + ); + + // Finalize everything + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: " old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_content_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_content("hello"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello".into(), + }] + ); + + let events = parser.push_content("hello world"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " world".into(), + }] + ); + + // No change + let events 
= parser.push_content("hello world"); + assert!(events.is_empty()); + + let events = parser.push_content("hello world!"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "!".into() }] + ); + + // Finalize with no additional content + let events = parser.finalize_content("hello world!"); + assert!(events.is_empty()); + } + + #[test] + fn test_finalize_content_with_remaining() { + let mut parser = ToolEditParser::default(); + + parser.push_content("partial"); + let events = parser.finalize_content("partial content here"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " content here".into(), + }] + ); + } + + #[test] + fn test_content_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial JSON fixer turns incomplete \n into \\ (literal backslash). + // The trailing backslash is held back. + let events = parser.push_content("hello,\\"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello,".into(), + }] + ); + + // Next partial corrects the escape to an actual newline. + // The held-back byte was wrong; the correct newline is emitted. + let events = parser.push_content("hello,\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + + // Normal growth. + let events = parser.push_content("hello,\nworld"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "world".into(), + }] + ); + } + + #[test] + fn test_content_finalize_with_trailing_backslash() { + let mut parser = ToolEditParser::default(); + + // Stream a partial with a fixer-corrupted trailing backslash. + // The backslash is held back. + parser.push_content("abc\\"); + + // Finalize reveals the correct character. 
+ let events = parser.finalize_content("abc\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + } + + #[test] + fn test_no_partials_direct_finalize() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "new".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_no_partials_direct_finalize_multiple() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_old_text_no_growth() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "same".into(), + done: false, + }] + ); + + // Same old_text, no new_text → no events + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_none_then_appears() { + let mut parser = ToolEditParser::default(); + + // Edit exists but 
old_text is None (field hasn't arrived yet) + let events = parser.push_edits(&[PartialEdit { + old_text: None, + new_text: None, + }]); + assert!(events.is_empty()); + + // old_text appears + let events = parser.push_edits(&[PartialEdit { + old_text: Some("text".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "text".into(), + done: false, + }] + ); + } + + #[test] + fn test_empty_old_text_with_new_text() { + let mut parser = ToolEditParser::default(); + + // old_text is empty, new_text appears immediately + let events = parser.push_edits(&[PartialEdit { + old_text: Some("".into()), + new_text: Some("inserted".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "inserted".into(), + done: false, + }, + ] + ); + } + + #[test] + fn test_three_edits_streamed() { + let mut parser = ToolEditParser::default(); + + // Stream first edit + parser.push_edits(&[PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }]); + + // Second edit appears + parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + ]); + + // Third edit appears + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + PartialEdit { + old_text: Some("c".into()), + new_text: None, + }, + ]); + + // Should finalize edit 1 (index=1) and start edit 2 (index=2) + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "c".into(), + done: false, + }, + ] + ); + + // Finalize 
+ let events = parser.finalize_edits(&[ + Edit { + old_text: "a".into(), + new_text: "A".into(), + }, + Edit { + old_text: "b".into(), + new_text: "B".into(), + }, + Edit { + old_text: "c".into(), + new_text: "C".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 2, + chunk: "C".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_unseen_old_text() { + let mut parser = ToolEditParser::default(); + + // Only saw partial old_text, never saw new_text in partials + parser.push_edits(&[PartialEdit { + old_text: Some("partial".into()), + new_text: None, + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "partial old text".into(), + new_text: "replacement".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: " old text".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "replacement".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_partially_seen_new_text() { + let mut parser = ToolEditParser::default(); + + parser.push_edits(&[PartialEdit { + old_text: Some("old".into()), + new_text: Some("partial".into()), + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "partial new text".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: " new text".into(), + done: true, + }] + ); + } + + #[test] + fn test_repeated_pushes_with_no_change() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert_eq!(events.len(), 2); // old done + new chunk + + // Push the exact same data again + let events = parser.push_edits(&[PartialEdit { + old_text: 
Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + + // And again + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial-json-fixer produces a literal backslash when the JSON stream + // cuts in the middle of an escape sequence like \n. The parser holds + // back the trailing backslash instead of emitting it. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\\".into()), // fixer closed incomplete \n as \\ + new_text: None, + }]); + // The trailing `\` is held back — only "hello," is emitted. + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hello,".into(), + done: false, + }] + ); + + // Next partial: the fixer corrects the escape to \n. + // The held-back byte was wrong, but we never emitted it. Now the + // correct newline at that position is emitted normally. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\n".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\n".into(), + done: false, + }] + ); + + // Continue normally. 
+ let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\nworld".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "world".into(), + done: false, + }] + ); + } + + #[test] + fn test_multiline_old_and_new_text() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "line1\nline2".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\n".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\nline3".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "LINE1\n".into(), + done: false, + }, + ] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\nLINE2\nLINE3".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "LINE2\nLINE3".into(), + done: false, + }] + ); + } +} diff --git a/crates/agent/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs index c536f45ba65c109d3068b0722db1ffb1cad8b87c..c697a5b78f1fe8c84d6ed58db13f651a493ae8c3 100644 --- a/crates/agent/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -1,14 +1,15 @@ use std::sync::Arc; use crate::{ - AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings, + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, + decide_permission_from_settings, }; use agent_client_protocol as acp; use agent_settings::AgentSettings; use anyhow::Result; use cloud_llm_client::WebSearchResponse; use futures::FutureExt as _; 
-use gpui::{App, AppContext, Task}; +use gpui::{App, Task}; use language_model::{ LanguageModelProviderId, LanguageModelToolResultContent, ZED_CLOUD_PROVIDER_ID, }; @@ -73,41 +74,51 @@ impl AgentTool for WebSearchTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_from_settings( - Self::NAME, - std::slice::from_ref(&input.query), - settings, - ); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(WebSearchToolOutput::Error { error: reason })); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]); - Some(event_stream.authorize( - format!("Search the web for {}", MarkdownInlineCode(&input.query)), - context, - cx, - )) - } - }; + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| WebSearchToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let (authorize, search_task) = cx.update(|cx| { + let decision = decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.query), + AgentSettings::get_global(cx), + ); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(WebSearchToolOutput::Error { error: reason }); + } + ToolPermissionDecision::Confirm => { + let context = + crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]); + Some(event_stream.authorize( + format!("Search the web for {}", MarkdownInlineCode(&input.query)), + context, + cx, + )) + } + }; + + let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else { + return Err(WebSearchToolOutput::Error { + error: "Web search is not available.".to_string(), + }); + }; - let Some(provider) 
= WebSearchRegistry::read_global(cx).active_provider() else { - return Task::ready(Err(WebSearchToolOutput::Error { - error: "Web search is not available.".to_string(), - })); - }; + let search_task = provider.search(input.query, cx); + Ok((authorize, search_task)) + })?; - let search_task = provider.search(input.query, cx); - cx.background_spawn(async move { if let Some(authorize) = authorize { authorize.await.map_err(|e| WebSearchToolOutput::Error { error: e.to_string() })?; } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index bb7c21083e1ee411d805d34e5676f4300dd5dce9..c63e4fab2201671fa6448e9d58f6c925c2c91cd8 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -10,7 +10,7 @@ use collections::HashMap; use futures::AsyncBufReadExt as _; use futures::io::BufReader; use project::Project; -use project::agent_server_store::AgentServerCommand; +use project::agent_server_store::{AgentServerCommand, GEMINI_NAME}; use serde::Deserialize; use settings::Settings as _; use task::ShellBuilder; @@ -36,6 +36,7 @@ pub struct UnsupportedVersion; pub struct AcpConnection { server_name: SharedString, + display_name: SharedString, telemetry_id: SharedString, connection: Rc, sessions: Rc>>, @@ -44,7 +45,6 @@ pub struct AcpConnection { default_mode: Option, default_model: Option, default_config_options: HashMap, - root_dir: PathBuf, child: Child, session_list: Option>, _io_task: Task>, @@ -158,22 +158,20 @@ impl AgentSessionList for AcpSessionList { pub async fn connect( server_name: SharedString, + display_name: SharedString, command: AgentServerCommand, - root_dir: &Path, default_mode: Option, default_model: Option, default_config_options: HashMap, - is_remote: bool, cx: &mut AsyncApp, ) -> Result> { let conn = AcpConnection::stdio( server_name, + display_name, command.clone(), - root_dir, default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; @@ -185,12 +183,11 @@ const 
MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1 impl AcpConnection { pub async fn stdio( server_name: SharedString, + display_name: SharedString, command: AgentServerCommand, - root_dir: &Path, default_mode: Option, default_model: Option, default_config_options: HashMap, - is_remote: bool, cx: &mut AsyncApp, ) -> Result { let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone()); @@ -198,9 +195,6 @@ impl AcpConnection { let mut child = builder.build_std_command(Some(command.path.display().to_string()), &command.args); child.envs(command.env.iter().flatten()); - if !is_remote { - child.current_dir(root_dir); - } let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?; let stdout = child.stdout.take().context("Failed to take stdout")?; @@ -325,11 +319,30 @@ impl AcpConnection { None }; + // TODO: Remove this override once Google team releases their official auth methods + let auth_methods = if server_name == GEMINI_NAME { + let mut args = command.args.clone(); + args.retain(|a| a != "--experimental-acp"); + let value = serde_json::json!({ + "label": "gemini /auth", + "command": command.path.to_string_lossy().into_owned(), + "args": args, + "env": command.env.clone().unwrap_or_default(), + }); + let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]); + vec![ + acp::AuthMethod::new("spawn-gemini-cli", "Login") + .description("Login with your Google or Vertex AI account") + .meta(meta), + ] + } else { + response.auth_methods + }; Ok(Self { - auth_methods: response.auth_methods, - root_dir: root_dir.to_owned(), + auth_methods, connection, server_name, + display_name, telemetry_id, sessions, agent_capabilities: response.agent_capabilities, @@ -347,10 +360,6 @@ impl AcpConnection { pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities { &self.agent_capabilities.prompt_capabilities } - - pub fn root_dir(&self) -> &Path { - &self.root_dir - } } impl Drop for AcpConnection { @@ 
-550,7 +559,7 @@ impl AgentConnection for AcpConnection { let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + self.display_name.clone(), self.clone(), project, action_log, @@ -603,10 +612,14 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); + let title = session + .title + .clone() + .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + title, self.clone(), project, action_log, @@ -676,10 +689,14 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); + let title = session + .title + .clone() + .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + title, self.clone(), project, action_log, diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index 6877c93342c22db3426bcf497fd9d45fe15c14ef..a07226ca25095fdb7037114d32d5033364a4999f 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -1,19 +1,13 @@ mod acp; -mod claude; -mod codex; mod custom; -mod gemini; #[cfg(any(test, feature = "test-support"))] pub mod e2e_tests; -pub use claude::*; use client::ProxySettings; -pub use codex::*; use collections::{HashMap, HashSet}; pub use custom::*; use fs::Fs; -pub use gemini::*; use http_client::read_no_proxy_from_env; use project::agent_server_store::AgentServerStore; @@ -22,7 +16,7 @@ use anyhow::Result; use gpui::{App, AppContext, Entity, SharedString, Task}; use project::Project; use settings::SettingsStore; -use std::{any::Any, path::Path, rc::Rc, sync::Arc}; +use std::{any::Any, rc::Rc, sync::Arc}; pub use 
acp::AcpConnection; @@ -58,10 +52,9 @@ pub trait AgentServer: Send { fn name(&self) -> SharedString; fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, - ) -> Task, Option)>>; + ) -> Task>>; fn into_any(self: Rc) -> Rc; diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs deleted file mode 100644 index f711708383fb6bdce42b08b26f0aa37ce173d9c3..0000000000000000000000000000000000000000 --- a/crates/agent_servers/src/claude.rs +++ /dev/null @@ -1,264 +0,0 @@ -use agent_client_protocol as acp; -use collections::HashSet; -use fs::Fs; -use settings::{SettingsStore, update_settings_file}; -use std::path::Path; -use std::rc::Rc; -use std::sync::Arc; -use std::{any::Any, path::PathBuf}; - -use anyhow::{Context as _, Result}; -use gpui::{App, AppContext as _, SharedString, Task}; -use project::agent_server_store::{AllAgentServersSettings, CLAUDE_AGENT_NAME}; - -use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; -use acp_thread::AgentConnection; - -#[derive(Clone)] -pub struct ClaudeCode; - -pub struct AgentServerLoginCommand { - pub path: PathBuf, - pub arguments: Vec, -} - -impl AgentServer for ClaudeCode { - fn name(&self) -> SharedString { - "Claude Agent".into() - } - - fn logo(&self) -> ui::IconName { - ui::IconName::AiClaude - } - - fn default_mode(&self, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).claude.clone() - }); - - settings - .as_ref() - .and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new)) - } - - fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { - update_settings_file(fs, cx, |settings, _| { - settings - .agent_servers - .get_or_insert_default() - .claude - .get_or_insert_default() - .default_mode = mode_id.map(|m| m.to_string()) - }); - } - - fn default_model(&self, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - 
settings.get::(None).claude.clone() - }); - - settings - .as_ref() - .and_then(|s| s.default_model.clone().map(acp::ModelId::new)) - } - - fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { - update_settings_file(fs, cx, |settings, _| { - settings - .agent_servers - .get_or_insert_default() - .claude - .get_or_insert_default() - .default_model = model_id.map(|m| m.to_string()) - }); - } - - fn favorite_model_ids(&self, cx: &mut App) -> HashSet { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).claude.clone() - }); - - settings - .as_ref() - .map(|s| { - s.favorite_models - .iter() - .map(|id| acp::ModelId::new(id.clone())) - .collect() - }) - .unwrap_or_default() - } - - fn toggle_favorite_model( - &self, - model_id: acp::ModelId, - should_be_favorite: bool, - fs: Arc, - cx: &App, - ) { - update_settings_file(fs, cx, move |settings, _| { - let favorite_models = &mut settings - .agent_servers - .get_or_insert_default() - .claude - .get_or_insert_default() - .favorite_models; - - let model_id_str = model_id.to_string(); - if should_be_favorite { - if !favorite_models.contains(&model_id_str) { - favorite_models.push(model_id_str); - } - } else { - favorite_models.retain(|id| id != &model_id_str); - } - }); - } - - fn default_config_option(&self, config_id: &str, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).claude.clone() - }); - - settings - .as_ref() - .and_then(|s| s.default_config_options.get(config_id).cloned()) - } - - fn set_default_config_option( - &self, - config_id: &str, - value_id: Option<&str>, - fs: Arc, - cx: &mut App, - ) { - let config_id = config_id.to_string(); - let value_id = value_id.map(|s| s.to_string()); - update_settings_file(fs, cx, move |settings, _| { - let config_options = &mut settings - .agent_servers - .get_or_insert_default() - .claude - .get_or_insert_default() - .default_config_options; - - if let Some(value) = 
value_id.clone() { - config_options.insert(config_id.clone(), value); - } else { - config_options.remove(&config_id); - } - }); - } - - fn favorite_config_option_value_ids( - &self, - config_id: &acp::SessionConfigId, - cx: &mut App, - ) -> HashSet { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).claude.clone() - }); - - settings - .as_ref() - .and_then(|s| s.favorite_config_option_values.get(config_id.0.as_ref())) - .map(|values| { - values - .iter() - .cloned() - .map(acp::SessionConfigValueId::new) - .collect() - }) - .unwrap_or_default() - } - - fn toggle_favorite_config_option_value( - &self, - config_id: acp::SessionConfigId, - value_id: acp::SessionConfigValueId, - should_be_favorite: bool, - fs: Arc, - cx: &App, - ) { - let config_id = config_id.to_string(); - let value_id = value_id.to_string(); - - update_settings_file(fs, cx, move |settings, _| { - let favorites = &mut settings - .agent_servers - .get_or_insert_default() - .claude - .get_or_insert_default() - .favorite_config_option_values; - - let entry = favorites.entry(config_id.clone()).or_insert_with(Vec::new); - - if should_be_favorite { - if !entry.iter().any(|v| v == &value_id) { - entry.push(value_id.clone()); - } - } else { - entry.retain(|v| v != &value_id); - if entry.is_empty() { - favorites.remove(&config_id); - } - } - }); - } - - fn connect( - &self, - root_dir: Option<&Path>, - delegate: AgentServerDelegate, - cx: &mut App, - ) -> Task, Option)>> { - let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); - let store = delegate.store.downgrade(); - let extra_env = load_proxy_env(cx); - let default_mode = self.default_mode(cx); - let default_model = self.default_model(cx); - let default_config_options = cx.read_global(|settings: &SettingsStore, _| { - settings - .get::(None) - .claude - .as_ref() - .map(|s| 
s.default_config_options.clone()) - .unwrap_or_default() - }); - - cx.spawn(async move |cx| { - let (command, root_dir, login) = store - .update(cx, |store, cx| { - let agent = store - .get_external_agent(&CLAUDE_AGENT_NAME.into()) - .context("Claude Agent is not registered")?; - anyhow::Ok(agent.get_command( - root_dir.as_deref(), - extra_env, - delegate.status_tx, - delegate.new_version_available, - &mut cx.to_async(), - )) - })?? - .await?; - let connection = crate::acp::connect( - name, - command, - root_dir.as_ref(), - default_mode, - default_model, - default_config_options, - is_remote, - cx, - ) - .await?; - Ok((connection, login)) - }) - } - - fn into_any(self: Rc) -> Rc { - self - } -} diff --git a/crates/agent_servers/src/codex.rs b/crates/agent_servers/src/codex.rs deleted file mode 100644 index 49deaa6192bb96bd41113f696e53a45a9e31dec0..0000000000000000000000000000000000000000 --- a/crates/agent_servers/src/codex.rs +++ /dev/null @@ -1,275 +0,0 @@ -use std::rc::Rc; -use std::sync::Arc; -use std::{any::Any, path::Path}; - -use acp_thread::AgentConnection; -use agent_client_protocol as acp; -use anyhow::{Context as _, Result}; -use collections::HashSet; -use fs::Fs; -use gpui::{App, AppContext as _, SharedString, Task}; -use project::agent_server_store::{AllAgentServersSettings, CODEX_NAME}; -use settings::{SettingsStore, update_settings_file}; - -use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; - -#[derive(Clone)] -pub struct Codex; - -const CODEX_API_KEY_VAR_NAME: &str = "CODEX_API_KEY"; -const OPEN_AI_API_KEY_VAR_NAME: &str = "OPEN_AI_API_KEY"; - -impl AgentServer for Codex { - fn name(&self) -> SharedString { - "Codex".into() - } - - fn logo(&self) -> ui::IconName { - ui::IconName::AiOpenAi - } - - fn default_mode(&self, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).codex.clone() - }); - - settings - .as_ref() - .and_then(|s| 
s.default_mode.clone().map(acp::SessionModeId::new)) - } - - fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { - update_settings_file(fs, cx, |settings, _| { - settings - .agent_servers - .get_or_insert_default() - .codex - .get_or_insert_default() - .default_mode = mode_id.map(|m| m.to_string()) - }); - } - - fn default_model(&self, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).codex.clone() - }); - - settings - .as_ref() - .and_then(|s| s.default_model.clone().map(acp::ModelId::new)) - } - - fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { - update_settings_file(fs, cx, |settings, _| { - settings - .agent_servers - .get_or_insert_default() - .codex - .get_or_insert_default() - .default_model = model_id.map(|m| m.to_string()) - }); - } - - fn favorite_model_ids(&self, cx: &mut App) -> HashSet { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).codex.clone() - }); - - settings - .as_ref() - .map(|s| { - s.favorite_models - .iter() - .map(|id| acp::ModelId::new(id.clone())) - .collect() - }) - .unwrap_or_default() - } - - fn toggle_favorite_model( - &self, - model_id: acp::ModelId, - should_be_favorite: bool, - fs: Arc, - cx: &App, - ) { - update_settings_file(fs, cx, move |settings, _| { - let favorite_models = &mut settings - .agent_servers - .get_or_insert_default() - .codex - .get_or_insert_default() - .favorite_models; - - let model_id_str = model_id.to_string(); - if should_be_favorite { - if !favorite_models.contains(&model_id_str) { - favorite_models.push(model_id_str); - } - } else { - favorite_models.retain(|id| id != &model_id_str); - } - }); - } - - fn default_config_option(&self, config_id: &str, cx: &App) -> Option { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).codex.clone() - }); - - settings - .as_ref() - .and_then(|s| s.default_config_options.get(config_id).cloned()) - 
} - - fn set_default_config_option( - &self, - config_id: &str, - value_id: Option<&str>, - fs: Arc, - cx: &mut App, - ) { - let config_id = config_id.to_string(); - let value_id = value_id.map(|s| s.to_string()); - update_settings_file(fs, cx, move |settings, _| { - let config_options = &mut settings - .agent_servers - .get_or_insert_default() - .codex - .get_or_insert_default() - .default_config_options; - - if let Some(value) = value_id.clone() { - config_options.insert(config_id.clone(), value); - } else { - config_options.remove(&config_id); - } - }); - } - - fn favorite_config_option_value_ids( - &self, - config_id: &acp::SessionConfigId, - cx: &mut App, - ) -> HashSet { - let settings = cx.read_global(|settings: &SettingsStore, _| { - settings.get::(None).codex.clone() - }); - - settings - .as_ref() - .and_then(|s| s.favorite_config_option_values.get(config_id.0.as_ref())) - .map(|values| { - values - .iter() - .cloned() - .map(acp::SessionConfigValueId::new) - .collect() - }) - .unwrap_or_default() - } - - fn toggle_favorite_config_option_value( - &self, - config_id: acp::SessionConfigId, - value_id: acp::SessionConfigValueId, - should_be_favorite: bool, - fs: Arc, - cx: &App, - ) { - let config_id = config_id.to_string(); - let value_id = value_id.to_string(); - - update_settings_file(fs, cx, move |settings, _| { - let favorites = &mut settings - .agent_servers - .get_or_insert_default() - .codex - .get_or_insert_default() - .favorite_config_option_values; - - let entry = favorites.entry(config_id.clone()).or_insert_with(Vec::new); - - if should_be_favorite { - if !entry.iter().any(|v| v == &value_id) { - entry.push(value_id.clone()); - } - } else { - entry.retain(|v| v != &value_id); - if entry.is_empty() { - favorites.remove(&config_id); - } - } - }); - } - - fn connect( - &self, - root_dir: Option<&Path>, - delegate: AgentServerDelegate, - cx: &mut App, - ) -> Task, Option)>> { - let name = self.name(); - let root_dir = root_dir.map(|root_dir| 
root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); - let store = delegate.store.downgrade(); - let mut extra_env = load_proxy_env(cx); - let default_mode = self.default_mode(cx); - let default_model = self.default_model(cx); - let default_config_options = cx.read_global(|settings: &SettingsStore, _| { - settings - .get::(None) - .codex - .as_ref() - .map(|s| s.default_config_options.clone()) - .unwrap_or_default() - }); - if let Ok(api_key) = std::env::var(CODEX_API_KEY_VAR_NAME) { - extra_env.insert(CODEX_API_KEY_VAR_NAME.into(), api_key); - } - if let Ok(api_key) = std::env::var(OPEN_AI_API_KEY_VAR_NAME) { - extra_env.insert(OPEN_AI_API_KEY_VAR_NAME.into(), api_key); - } - - cx.spawn(async move |cx| { - let (command, root_dir, login) = store - .update(cx, |store, cx| { - let agent = store - .get_external_agent(&CODEX_NAME.into()) - .context("Codex is not registered")?; - anyhow::Ok(agent.get_command( - root_dir.as_deref(), - extra_env, - delegate.status_tx, - delegate.new_version_available, - &mut cx.to_async(), - )) - })?? 
- .await?; - - let connection = crate::acp::connect( - name, - command, - root_dir.as_ref(), - default_mode, - default_model, - default_config_options, - is_remote, - cx, - ) - .await?; - Ok((connection, login)) - }) - } - - fn into_any(self: Rc) -> Rc { - self - } -} - -#[cfg(test)] -pub(crate) mod tests { - use super::*; - - crate::common_e2e_tests!(async |_, _| Codex, allow_option_id = "proceed_once"); -} diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index dc65a65fc74ce303393b6cca43836e000f1dafa9..b0669d1fb69e110f0ba206a3579f16738de5e7e2 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -3,11 +3,15 @@ use acp_thread::AgentConnection; use agent_client_protocol as acp; use anyhow::{Context as _, Result}; use collections::HashSet; +use credentials_provider::CredentialsProvider; use fs::Fs; use gpui::{App, AppContext as _, SharedString, Task}; -use project::agent_server_store::{AllAgentServersSettings, ExternalAgentServerName}; +use language_model::{ApiKey, EnvVar}; +use project::agent_server_store::{ + AllAgentServersSettings, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME, +}; use settings::{SettingsStore, update_settings_file}; -use std::{path::Path, rc::Rc, sync::Arc}; +use std::{rc::Rc, sync::Arc}; use ui::IconName; /// A generic agent server implementation for custom user-defined agents @@ -34,7 +38,6 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .custom .get(self.name().as_ref()) .cloned() }); @@ -52,7 +55,6 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .custom .get(self.name().as_ref()) .cloned() }); @@ -86,7 +88,6 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .custom .entry(name.to_string()) .or_insert_with(|| 
settings::CustomAgentServerSettings::Extension { default_model: None, @@ -135,7 +136,6 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .custom .entry(name.to_string()) .or_insert_with(|| settings::CustomAgentServerSettings::Extension { default_model: None, @@ -160,7 +160,6 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .custom .get(self.name().as_ref()) .cloned() }); @@ -176,7 +175,6 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .custom .entry(name.to_string()) .or_insert_with(|| settings::CustomAgentServerSettings::Extension { default_model: None, @@ -201,7 +199,6 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .custom .get(self.name().as_ref()) .cloned() }); @@ -229,7 +226,6 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .custom .entry(name.to_string()) .or_insert_with(|| settings::CustomAgentServerSettings::Extension { default_model: None, @@ -267,7 +263,6 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .custom .get(self.name().as_ref()) .cloned() }); @@ -291,7 +286,6 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .custom .entry(name.to_string()) .or_insert_with(|| settings::CustomAgentServerSettings::Extension { default_model: None, @@ -327,20 +321,23 @@ impl AgentServer for CustomAgentServer { fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, - ) -> Task, Option)>> { + ) -> Task>> { let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = 
delegate.project.read(cx).is_via_remote_server(); + let display_name = delegate + .store + .read(cx) + .agent_display_name(&ExternalAgentServerName(name.clone())) + .unwrap_or_else(|| name.clone()); let default_mode = self.default_mode(cx); let default_model = self.default_model(cx); + let is_previous_built_in = + matches!(name.as_ref(), CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME); let (default_config_options, is_registry_agent) = cx.read_global(|settings: &SettingsStore, _| { let agent_settings = settings .get::(None) - .custom .get(self.name().as_ref()); let is_registry = agent_settings @@ -372,16 +369,46 @@ impl AgentServer for CustomAgentServer { (config_options, is_registry) }); + // Intermediate step to allow for previous built-ins to also be triggered if they aren't in settings yet. + let is_registry_agent = is_registry_agent || is_previous_built_in; + if is_registry_agent { if let Some(registry_store) = project::AgentRegistryStore::try_global(cx) { registry_store.update(cx, |store, cx| store.refresh_if_stale(cx)); } } + let mut extra_env = load_proxy_env(cx); + if delegate.store.read(cx).no_browser() { + extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned()); + } + if is_registry_agent { + match name.as_ref() { + CLAUDE_AGENT_NAME => { + extra_env.insert("ANTHROPIC_API_KEY".into(), "".into()); + } + CODEX_NAME => { + if let Ok(api_key) = std::env::var("CODEX_API_KEY") { + extra_env.insert("CODEX_API_KEY".into(), api_key); + } + if let Ok(api_key) = std::env::var("OPEN_AI_API_KEY") { + extra_env.insert("OPEN_AI_API_KEY".into(), api_key); + } + } + GEMINI_NAME => { + extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); + } + _ => {} + } + } let store = delegate.store.downgrade(); - let extra_env = load_proxy_env(cx); cx.spawn(async move |cx| { - let (command, root_dir, login) = store + if is_registry_agent && name.as_ref() == GEMINI_NAME { + if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() { + 
extra_env.insert("GEMINI_API_KEY".into(), api_key); + } + } + let command = store .update(cx, |store, cx| { let agent = store .get_external_agent(&ExternalAgentServerName(name.clone())) @@ -389,7 +416,6 @@ impl AgentServer for CustomAgentServer { format!("Custom agent server `{}` is not registered", name) })?; anyhow::Ok(agent.get_command( - root_dir.as_deref(), extra_env, delegate.status_tx, delegate.new_version_available, @@ -399,16 +425,15 @@ impl AgentServer for CustomAgentServer { .await?; let connection = crate::acp::connect( name, + display_name, command, - root_dir.as_ref(), default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; - Ok((connection, login)) + Ok(connection) }) } @@ -416,3 +441,20 @@ impl AgentServer for CustomAgentServer { self } } + +fn api_key_for_gemini_cli(cx: &mut App) -> Task> { + let env_var = EnvVar::new("GEMINI_API_KEY".into()).or(EnvVar::new("GOOGLE_AI_API_KEY".into())); + if let Some(key) = env_var.value { + return Task::ready(Ok(key)); + } + let credentials_provider = ::global(cx); + let api_url = google_ai::API_URL.to_string(); + cx.spawn(async move |cx| { + Ok( + ApiKey::load_from_system_keychain(&api_url, credentials_provider.as_ref(), cx) + .await? 
+ .key() + .to_string(), + ) + }) +} diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 4fe068ee5a9b68ce87bba27fb82db967e7a8aa4a..c5754bcd7610dbf0c858058ea726a746bef37ab1 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -4,8 +4,6 @@ use agent_client_protocol as acp; use futures::{FutureExt, StreamExt, channel::mpsc, select}; use gpui::{Entity, TestAppContext}; use indoc::indoc; -#[cfg(test)] -use project::agent_server_store::BuiltinAgentServerSettings; use project::{FakeFs, Project}; #[cfg(test)] use settings::Settings; @@ -414,18 +412,7 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { #[cfg(test)] project::agent_server_store::AllAgentServersSettings::override_global( - project::agent_server_store::AllAgentServersSettings { - claude: Some(BuiltinAgentServerSettings { - path: Some("claude-agent-acp".into()), - ..Default::default() - }), - gemini: Some(crate::gemini::tests::local_command().into()), - codex: Some(BuiltinAgentServerSettings { - path: Some("codex-acp".into()), - ..Default::default() - }), - custom: collections::HashMap::default(), - }, + project::agent_server_store::AllAgentServersSettings(collections::HashMap::default()), cx, ); }); @@ -444,10 +431,7 @@ pub async fn new_test_thread( let store = project.read_with(cx, |project, _| project.agent_server_store().clone()); let delegate = AgentServerDelegate::new(store, project.clone(), None, None); - let (connection, _) = cx - .update(|cx| server.connect(Some(current_dir.as_ref()), delegate, cx)) - .await - .unwrap(); + let connection = cx.update(|cx| server.connect(delegate, cx)).await.unwrap(); cx.update(|cx| connection.new_session(project.clone(), current_dir.as_ref(), cx)) .await diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs deleted file mode 100644 index 1805e64a3a94dddd2b7b3c8762123b98a384ec23..0000000000000000000000000000000000000000 --- 
a/crates/agent_servers/src/gemini.rs +++ /dev/null @@ -1,130 +0,0 @@ -use std::rc::Rc; -use std::{any::Any, path::Path}; - -use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; -use acp_thread::AgentConnection; -use anyhow::{Context as _, Result}; -use credentials_provider::CredentialsProvider; -use gpui::{App, AppContext as _, SharedString, Task}; -use language_model::{ApiKey, EnvVar}; -use project::agent_server_store::{AllAgentServersSettings, GEMINI_NAME}; -use settings::SettingsStore; - -const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY"; -const GOOGLE_AI_API_KEY_VAR_NAME: &str = "GOOGLE_AI_API_KEY"; - -fn api_key_for_gemini_cli(cx: &mut App) -> Task> { - let env_var = EnvVar::new(GEMINI_API_KEY_VAR_NAME.into()) - .or(EnvVar::new(GOOGLE_AI_API_KEY_VAR_NAME.into())); - if let Some(key) = env_var.value { - return Task::ready(Ok(key)); - } - let credentials_provider = ::global(cx); - let api_url = google_ai::API_URL.to_string(); - cx.spawn(async move |cx| { - Ok( - ApiKey::load_from_system_keychain(&api_url, credentials_provider.as_ref(), cx) - .await? 
- .key() - .to_string(), - ) - }) -} - -#[derive(Clone)] -pub struct Gemini; - -impl AgentServer for Gemini { - fn name(&self) -> SharedString { - "Gemini CLI".into() - } - - fn logo(&self) -> ui::IconName { - ui::IconName::AiGemini - } - - fn connect( - &self, - root_dir: Option<&Path>, - delegate: AgentServerDelegate, - cx: &mut App, - ) -> Task, Option)>> { - let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); - let store = delegate.store.downgrade(); - let mut extra_env = load_proxy_env(cx); - let default_mode = self.default_mode(cx); - let default_model = self.default_model(cx); - let default_config_options = cx.read_global(|settings: &SettingsStore, _| { - settings - .get::(None) - .gemini - .as_ref() - .map(|s| s.default_config_options.clone()) - .unwrap_or_default() - }); - - cx.spawn(async move |cx| { - extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); - - if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() { - extra_env.insert("GEMINI_API_KEY".into(), api_key); - } - let (command, root_dir, login) = store - .update(cx, |store, cx| { - let agent = store - .get_external_agent(&GEMINI_NAME.into()) - .context("Gemini CLI is not registered")?; - anyhow::Ok(agent.get_command( - root_dir.as_deref(), - extra_env, - delegate.status_tx, - delegate.new_version_available, - &mut cx.to_async(), - )) - })?? 
- .await?; - - let connection = crate::acp::connect( - name, - command, - root_dir.as_ref(), - default_mode, - default_model, - default_config_options, - is_remote, - cx, - ) - .await?; - Ok((connection, login)) - }) - } - - fn into_any(self: Rc) -> Rc { - self - } -} - -#[cfg(test)] -pub(crate) mod tests { - use project::agent_server_store::AgentServerCommand; - - use super::*; - use std::path::Path; - - crate::common_e2e_tests!(async |_, _| Gemini, allow_option_id = "proceed_once"); - - pub fn local_command() -> AgentServerCommand { - let cli_path = Path::new(env!("CARGO_MANIFEST_DIR")) - .join("../../../gemini-cli/packages/cli") - .to_string_lossy() - .to_string(); - - AgentServerCommand { - path: "node".into(), - args: vec![cli_path], - env: None, - } - } -} diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 2a31781054fd29b30a3c8119e87491edbfb1e658..3e46e14b53c46a2aec3ac9552246a10ffc2aeee9 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -58,6 +58,7 @@ feature_flags.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +git.workspace = true fuzzy.workspace = true gpui.workspace = true gpui_tokio.workspace = true diff --git a/crates/agent_ui/src/acp.rs b/crates/agent_ui/src/acp.rs deleted file mode 100644 index f76e64b557e7ee2ec6054bd0fab0afc36b201e2c..0000000000000000000000000000000000000000 --- a/crates/agent_ui/src/acp.rs +++ /dev/null @@ -1,14 +0,0 @@ -mod config_options; -mod entry_view_state; -mod message_editor; -mod mode_selector; -mod model_selector; -mod model_selector_popover; -mod thread_history; -pub(crate) mod thread_view; - -pub use mode_selector::ModeSelector; -pub use model_selector::AcpModelSelector; -pub use model_selector_popover::AcpModelSelectorPopover; -pub use thread_history::*; -pub use thread_view::AcpServerView; diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 
e667a6c1d0507f399b4339c84fec4fc7099eab4e..aa316ba7c5efe5f679764cd7d4626a1f1310e4c6 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -8,7 +8,6 @@ use std::{ops::Range, sync::Arc}; use agent::ContextServerRegistry; use anyhow::Result; -use client::zed_urls; use cloud_api_types::Plan; use collections::HashMap; use context_server::ContextServerId; @@ -20,6 +19,7 @@ use gpui::{ Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, Subscription, Task, WeakEntity, }; +use itertools::Itertools; use language::LanguageRegistry; use language_model::{ IconOrSvg, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, @@ -28,10 +28,7 @@ use language_model::{ use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ - agent_server_store::{ - AgentServerStore, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName, - ExternalAgentSource, GEMINI_NAME, - }, + agent_server_store::{AgentServerStore, ExternalAgentServerName, ExternalAgentSource}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use settings::{Settings, SettingsStore, update_settings_file}; @@ -941,9 +938,6 @@ impl AgentConfiguration { let user_defined_agents = agent_server_store .external_agents() - .filter(|name| { - name.0 != GEMINI_NAME && name.0 != CLAUDE_AGENT_NAME && name.0 != CODEX_NAME - }) .cloned() .collect::>(); @@ -961,6 +955,7 @@ impl AgentConfiguration { let source = agent_server_store.agent_source(&name).unwrap_or_default(); (name, icon, display_name, source) }) + .sorted_unstable_by_key(|(_, _, display_name, _)| display_name.to_lowercase()) .collect(); let add_agent_popover = PopoverMenu::new("add-agent-server-popover") @@ -998,22 +993,6 @@ impl AgentConfiguration { }) .separator() .header("Learn More") - .item( - ContextMenuEntry::new("Agent Servers Docs") - 
.icon(IconName::ArrowUpRight) - .icon_color(Color::Muted) - .icon_position(IconPosition::End) - .handler({ - move |window, cx| { - window.dispatch_action( - Box::new(OpenBrowser { - url: zed_urls::agent_server_docs(cx), - }), - cx, - ); - } - }), - ) .item( ContextMenuEntry::new("ACP Docs") .icon(IconName::ArrowUpRight) @@ -1049,51 +1028,24 @@ impl AgentConfiguration { "All agents connected through the Agent Client Protocol.", add_agent_popover.into_any_element(), )) - .child( - v_flex() - .p_4() - .pt_0() - .gap_2() - .child(self.render_agent_server( - AgentIcon::Name(IconName::AiClaude), - "Claude Agent", - "Claude Agent", - ExternalAgentSource::Builtin, - cx, - )) - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - .child(self.render_agent_server( - AgentIcon::Name(IconName::AiOpenAi), - "Codex CLI", - "Codex CLI", - ExternalAgentSource::Builtin, - cx, - )) - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - .child(self.render_agent_server( - AgentIcon::Name(IconName::AiGemini), - "Gemini CLI", - "Gemini CLI", - ExternalAgentSource::Builtin, + .child(v_flex().p_4().pt_0().gap_2().map(|mut parent| { + let mut first = true; + for (name, icon, display_name, source) in user_defined_agents { + if !first { + parent = parent + .child(Divider::horizontal().color(DividerColor::BorderFaded)); + } + first = false; + parent = parent.child(self.render_agent_server( + icon, + name, + display_name, + source, cx, - )) - .map(|mut parent| { - for (name, icon, display_name, source) in user_defined_agents { - parent = parent - .child( - Divider::horizontal().color(DividerColor::BorderFaded), - ) - .child(self.render_agent_server( - icon, - name, - display_name, - source, - cx, - )); - } - parent - }), - ), + )); + } + parent + })), ) } @@ -1134,7 +1086,7 @@ impl AgentConfiguration { )), IconName::AcpRegistry, )), - ExternalAgentSource::Builtin | ExternalAgentSource::Custom => None, + ExternalAgentSource::Custom => None, }; let agent_server_name = 
ExternalAgentServerName(id.clone()); @@ -1176,19 +1128,46 @@ impl AgentConfiguration { let Some(agent_servers) = settings.agent_servers.as_mut() else { return; }; - if let Some(entry) = agent_servers.custom.get(agent_name.0.as_ref()) + if let Some(entry) = agent_servers.get(agent_name.0.as_ref()) && matches!( entry, settings::CustomAgentServerSettings::Registry { .. } ) { - agent_servers.custom.remove(agent_name.0.as_ref()); + agent_servers.remove(agent_name.0.as_ref()); + } + }); + })), + ) + } + ExternalAgentSource::Custom => { + let fs = self.fs.clone(); + Some( + IconButton::new( + SharedString::from(format!("uninstall-{}", id)), + IconName::Trash, + ) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Remove Custom Agent")) + .on_click(cx.listener(move |_, _, _window, cx| { + let agent_name = agent_server_name.clone(); + update_settings_file(fs.clone(), cx, move |settings, _| { + let Some(agent_servers) = settings.agent_servers.as_mut() else { + return; + }; + if let Some(entry) = agent_servers.get(agent_name.0.as_ref()) + && matches!( + entry, + settings::CustomAgentServerSettings::Custom { .. 
} + ) + { + agent_servers.remove(agent_name.0.as_ref()); } }); })), ) } - ExternalAgentSource::Builtin | ExternalAgentSource::Custom => None, }; h_flex() @@ -1367,29 +1346,23 @@ async fn open_new_agent_servers_entry_in_settings_editor( !settings .agent_servers .as_ref() - .is_some_and(|agent_servers| { - agent_servers.custom.contains_key(name.as_str()) - }) + .is_some_and(|agent_servers| agent_servers.contains_key(name.as_str())) }); if let Some(server_name) = server_name { unique_server_name = Some(SharedString::from(server_name.clone())); - settings - .agent_servers - .get_or_insert_default() - .custom - .insert( - server_name, - settings::CustomAgentServerSettings::Custom { - path: "path_to_executable".into(), - args: vec![], - env: HashMap::default(), - default_mode: None, - default_model: None, - favorite_models: vec![], - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }, - ); + settings.agent_servers.get_or_insert_default().insert( + server_name, + settings::CustomAgentServerSettings::Custom { + path: "path_to_executable".into(), + args: vec![], + env: HashMap::default(), + default_mode: None, + default_model: None, + favorite_models: vec![], + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + }, + ); } }); diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 78c032a565522a7eac145add3f65568d559ceb24..38805f2c26693f168c7273afddf5aceea44f83e3 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -877,9 +877,14 @@ fn wait_for_context_server( context_server_id: ContextServerId, cx: &mut App, ) -> Task>> { + use std::time::Duration; + + const WAIT_TIMEOUT: Duration = Duration::from_secs(120); + let (tx, rx) = 
futures::channel::oneshot::channel(); let tx = Arc::new(Mutex::new(Some(tx))); + let context_server_id_for_timeout = context_server_id.clone(); let subscription = cx.subscribe(context_server_store, move |_, event, _cx| { let project::context_server_store::ServerStatusChangedEvent { server_id, status } = event; @@ -909,12 +914,20 @@ fn wait_for_context_server( } }); - cx.spawn(async move |_cx| { - let result = rx - .await - .map_err(|_| Arc::from("Context server store was dropped"))?; + cx.spawn(async move |cx| { + let timeout = cx.background_executor().timer(WAIT_TIMEOUT); + let result = futures::future::select(rx, timeout).await; drop(subscription); - result + match result { + futures::future::Either::Left((Ok(inner), _)) => inner, + futures::future::Either::Left((Err(_), _)) => { + Err(Arc::from("Context server store was dropped")) + } + futures::future::Either::Right(_) => Err(Arc::from(format!( + "Timed out waiting for context server `{}` to start. Check the Zed log for details.", + context_server_id_for_timeout + ))), + } }) } diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index f46588c79033d965cbee0aaeb2624e7ae0756af6..744c92a7f7739c9fda2664de45d536769e802986 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -2,10 +2,9 @@ mod profile_modal_header; use std::sync::Arc; -use agent::{AgentTool, ContextServerRegistry, SpawnAgentTool}; +use agent::ContextServerRegistry; use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles}; use editor::Editor; -use feature_flags::{FeatureFlagAppExt as _, SubagentsFeatureFlag}; use fs::Fs; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*}; use language_model::{LanguageModel, LanguageModelRegistry}; @@ -363,10 +362,7 @@ impl ManageProfilesModal { let 
supported_by_provider = provider.as_ref().map_or(true, |provider| { agent::tool_supports_provider(name, provider) }); - let enabled_by_feature_flag = - *name != SpawnAgentTool::NAME || cx.has_flag::(); - - supported_by_provider && enabled_by_feature_flag + supported_by_provider }) .map(Arc::from) .collect(); diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index 1c99f665ab1c8fc995d47682f92365852bbc9637..be6fcb5bd2b5eeb4d33f4aaefc31cfeb4a978564 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -172,12 +172,7 @@ impl PickerDelegate for ToolPickerDelegate { self.selected_index = ix; } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { let item = &self.filtered_items[ix]; match item { PickerItem::Tool { .. 
} => true, diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index b02af97881cff92714641b7f4e3fd10601e0685f..8fa68b0c510c086d7c6e224b24675e6f19344b82 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -1403,7 +1403,7 @@ impl AgentDiff { self.update_reviewing_editors(workspace, window, cx); } } - AcpThreadEvent::Stopped => { + AcpThreadEvent::Stopped(_) => { self.update_reviewing_editors(workspace, window, cx); } AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) | AcpThreadEvent::Refusal => { diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index cdc0ee0b1fd9287f065e2bc7c8f7c84086689050..0a216ad4bd39e0ea3949eca95f8f7461271ba8de 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,6 +1,6 @@ use std::{ ops::Range, - path::Path, + path::{Path, PathBuf}, rc::Rc, sync::{ Arc, @@ -9,11 +9,12 @@ use std::{ time::Duration, }; -use acp_thread::{AcpThread, AgentSessionInfo, MentionUri}; +use acp_thread::{AcpThread, AgentSessionInfo, MentionUri, ThreadStatus}; use agent::{ContextServerRegistry, SharedThread, ThreadStore}; use agent_client_protocol as acp; use agent_servers::AgentServer; use db::kvp::{Dismissable, KEY_VALUE_STORE}; +use itertools::Itertools; use project::{ ExternalAgentServerName, agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME}, @@ -21,16 +22,18 @@ use project::{ use serde::{Deserialize, Serialize}; use settings::{LanguageModelProviderSetting, LanguageModelSelection}; +use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt as _}; use zed_actions::agent::{OpenClaudeAgentOnboardingModal, ReauthenticateAgent, ReviewBranchDiff}; +use crate::ManageProfiles; use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; use crate::{ - AddContextServer, AgentDiffPane, CopyThreadToClipboard, Follow, InlineAssistant, - LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, 
OpenAgentDiff, - OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, ToggleNewThreadMenu, - ToggleOptionsMenu, - acp::AcpServerView, + AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow, + InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, + OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn, + ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, + connection_view::{AcpThreadViewEvent, ThreadView}, slash_command::SlashCommandCompletionProvider, text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate}, ui::EndTrialUpsell, @@ -39,11 +42,9 @@ use crate::{ AgentInitialContent, ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, }; use crate::{ - ExpandMessageEditor, - acp::{AcpThreadHistory, ThreadHistoryEvent}, + ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent, text_thread_history::{TextThreadHistory, TextThreadHistoryEvent}, }; -use crate::{ManageProfiles, acp::thread_view::AcpThreadView}; use agent_settings::AgentSettings; use ai_onboarding::AgentPanelOnboarding; use anyhow::{Result, anyhow}; @@ -51,10 +52,12 @@ use assistant_slash_command::SlashCommandWorkingSet; use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary}; use client::UserStore; use cloud_api_types::Plan; +use collections::HashMap; use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; use extension::ExtensionEvents; use extension_host::ExtensionStore; use fs::Fs; +use git::repository::validate_worktree_directory; use gpui::{ Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, @@ -62,6 +65,7 @@ use gpui::{ }; use language::LanguageRegistry; use 
language_model::{ConfigurationError, LanguageModelRegistry}; +use project::project_settings::ProjectSettings; use project::{Project, ProjectPath, Worktree}; use prompt_store::{PromptBuilder, PromptStore, UserPromptId}; use rules_library::{RulesLibrary, open_rules_library}; @@ -69,8 +73,8 @@ use search::{BufferSearchBar, buffer_search}; use settings::{Settings, update_settings_file}; use theme::ThemeSettings; use ui::{ - Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab, - Tooltip, prelude::*, utils::WithRemSize, + Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu, + PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize, }; use util::ResultExt as _; use workspace::{ @@ -124,6 +128,8 @@ struct SerializedAgentPanel { selected_agent: Option, #[serde(default)] last_active_thread: Option, + #[serde(default)] + start_thread_in: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -325,6 +331,13 @@ pub fn init(cx: &mut App) { cx, ); }); + }) + .register_action(|workspace, action: &StartThreadIn, _window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.set_start_thread_in(action, cx); + }); + } }); }, ) @@ -340,7 +353,7 @@ enum HistoryKind { enum ActiveView { Uninitialized, AgentThread { - server_view: Entity, + server_view: Entity, }, TextThread { text_thread_editor: Entity, @@ -366,21 +379,19 @@ pub enum AgentType { #[default] NativeAgent, TextThread, - Gemini, - ClaudeAgent, - Codex, Custom { name: SharedString, }, } impl AgentType { + pub fn is_native(&self) -> bool { + matches!(self, Self::NativeAgent) + } + fn label(&self) -> SharedString { match self { Self::NativeAgent | Self::TextThread => "Zed Agent".into(), - Self::Gemini => "Gemini CLI".into(), - Self::ClaudeAgent => "Claude Agent".into(), - Self::Codex => "Codex".into(), Self::Custom { name, .. 
} => name.into(), } } @@ -388,9 +399,6 @@ impl AgentType { fn icon(&self) -> Option { match self { Self::NativeAgent | Self::TextThread => None, - Self::Gemini => Some(IconName::AiGemini), - Self::ClaudeAgent => Some(IconName::AiClaude), - Self::Codex => Some(IconName::AiOpenAi), Self::Custom { .. } => Some(IconName::Sparkle), } } @@ -399,15 +407,35 @@ impl AgentType { impl From for AgentType { fn from(value: ExternalAgent) -> Self { match value { - ExternalAgent::Gemini => Self::Gemini, - ExternalAgent::ClaudeCode => Self::ClaudeAgent, - ExternalAgent::Codex => Self::Codex, ExternalAgent::Custom { name } => Self::Custom { name }, ExternalAgent::NativeAgent => Self::NativeAgent, } } } +impl StartThreadIn { + fn label(&self) -> SharedString { + match self { + Self::LocalProject => "Local Project".into(), + Self::NewWorktree => "New Worktree".into(), + } + } + + fn icon(&self) -> IconName { + match self { + Self::LocalProject => IconName::Screen, + Self::NewWorktree => IconName::GitBranchPlus, + } + } +} + +#[derive(Clone, Debug)] +#[allow(dead_code)] +pub enum WorktreeCreationStatus { + Creating, + Error(SharedString), +} + impl ActiveView { pub fn which_font_size_used(&self) -> WhichFontSize { match self { @@ -515,7 +543,7 @@ pub struct AgentPanel { project: Entity, fs: Arc, language_registry: Arc, - acp_history: Entity, + acp_history: Entity, text_thread_history: Entity, thread_store: Entity, text_thread_store: Entity, @@ -526,8 +554,9 @@ pub struct AgentPanel { focus_handle: FocusHandle, active_view: ActiveView, previous_view: Option, - _active_view_observation: Option, + background_threads: HashMap>, new_thread_menu_handle: PopoverMenuHandle, + start_thread_in_menu_handle: PopoverMenuHandle, agent_panel_menu_handle: PopoverMenuHandle, agent_navigation_menu_handle: PopoverMenuHandle, agent_navigation_menu: Option>, @@ -538,9 +567,14 @@ pub struct AgentPanel { pending_serialization: Option>>, onboarding: Entity, selected_agent: AgentType, + start_thread_in: 
StartThreadIn, + worktree_creation_status: Option, + _thread_view_subscription: Option, + _worktree_creation_task: Option>, show_trust_workspace_message: bool, last_configuration_error_telemetry: Option, on_boarding_upsell_dismissed: AtomicBool, + _active_view_observation: Option, } impl AgentPanel { @@ -551,6 +585,7 @@ impl AgentPanel { let width = self.width; let selected_agent = self.selected_agent.clone(); + let start_thread_in = Some(self.start_thread_in); let last_active_thread = self.active_agent_thread(cx).map(|thread| { let thread = thread.read(cx); @@ -574,6 +609,7 @@ impl AgentPanel { width, selected_agent: Some(selected_agent), last_active_thread, + start_thread_in, }, ) .await?; @@ -618,6 +654,37 @@ impl AgentPanel { })? .await?; + let last_active_thread = if let Some(thread_info) = serialized_panel + .as_ref() + .and_then(|p| p.last_active_thread.clone()) + { + if thread_info.agent_type.is_native() { + let session_id = acp::SessionId::new(thread_info.session_id.clone()); + let load_result = cx.update(|_window, cx| { + let thread_store = ThreadStore::global(cx); + thread_store.update(cx, |store, cx| store.load_thread(session_id, cx)) + }); + let thread_exists = if let Ok(task) = load_result { + task.await.ok().flatten().is_some() + } else { + false + }; + if thread_exists { + Some(thread_info) + } else { + log::warn!( + "last active thread {} not found in database, skipping restoration", + thread_info.session_id + ); + None + } + } else { + Some(thread_info) + } + } else { + None + }; + let panel = workspace.update_in(cx, |workspace, window, cx| { let panel = cx.new(|cx| Self::new(workspace, text_thread_store, prompt_store, window, cx)); @@ -628,44 +695,45 @@ impl AgentPanel { if let Some(selected_agent) = serialized_panel.selected_agent.clone() { panel.selected_agent = selected_agent; } + if let Some(start_thread_in) = serialized_panel.start_thread_in { + let is_worktree_flag_enabled = + cx.has_flag::(); + let is_valid = match &start_thread_in { + 
StartThreadIn::LocalProject => true, + StartThreadIn::NewWorktree => { + let project = panel.project.read(cx); + is_worktree_flag_enabled && !project.is_via_collab() + } + }; + if is_valid { + panel.start_thread_in = start_thread_in; + } else { + log::info!( + "deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject", + start_thread_in, + ); + } + } cx.notify(); }); } - panel - })?; - - if let Some(thread_info) = serialized_panel.and_then(|p| p.last_active_thread) { - let session_id = acp::SessionId::new(thread_info.session_id.clone()); - let load_task = panel.update(cx, |panel, cx| { - let thread_store = panel.thread_store.clone(); - thread_store.update(cx, |store, cx| store.load_thread(session_id, cx)) - }); - let thread_exists = load_task - .await - .map(|thread: Option| thread.is_some()) - .unwrap_or(false); - - if thread_exists { - panel.update_in(cx, |panel, window, cx| { - panel.selected_agent = thread_info.agent_type.clone(); - let session_info = AgentSessionInfo { - session_id: acp::SessionId::new(thread_info.session_id), - cwd: thread_info.cwd, - title: thread_info.title.map(SharedString::from), - updated_at: None, - meta: None, - }; + if let Some(thread_info) = last_active_thread { + let agent_type = thread_info.agent_type.clone(); + let session_info = AgentSessionInfo { + session_id: acp::SessionId::new(thread_info.session_id), + cwd: thread_info.cwd, + title: thread_info.title.map(SharedString::from), + updated_at: None, + meta: None, + }; + panel.update(cx, |panel, cx| { + panel.selected_agent = agent_type; panel.load_agent_thread(session_info, window, cx); - })?; - } else { - log::error!( - "could not restore last active thread: \ - no thread found in database with ID {:?}", - thread_info.session_id - ); + }); } - } + panel + })?; Ok(panel) }) @@ -690,7 +758,7 @@ impl AgentPanel { cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let thread_store = ThreadStore::global(cx); - let 
acp_history = cx.new(|cx| AcpThreadHistory::new(None, window, cx)); + let acp_history = cx.new(|cx| ThreadHistory::new(None, window, cx)); let text_thread_history = cx.new(|cx| TextThreadHistory::new(text_thread_store.clone(), window, cx)); cx.subscribe_in( @@ -811,8 +879,9 @@ impl AgentPanel { focus_handle: cx.focus_handle(), context_server_registry, previous_view: None, - _active_view_observation: None, + background_threads: HashMap::default(), new_thread_menu_handle: PopoverMenuHandle::default(), + start_thread_in_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: PopoverMenuHandle::default(), agent_navigation_menu_handle: PopoverMenuHandle::default(), agent_navigation_menu: None, @@ -826,9 +895,14 @@ impl AgentPanel { text_thread_history, thread_store, selected_agent: AgentType::default(), + start_thread_in: StartThreadIn::default(), + worktree_creation_status: None, + _thread_view_subscription: None, + _worktree_creation_task: None, show_trust_workspace_message: false, last_configuration_error_telemetry: None, on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()), + _active_view_observation: None, }; // Initial sync of agent servers from extensions @@ -874,7 +948,7 @@ impl AgentPanel { &self.thread_store } - pub fn history(&self) -> &Entity { + pub fn history(&self) -> &Entity { &self.acp_history } @@ -914,7 +988,7 @@ impl AgentPanel { .unwrap_or(false) } - pub(crate) fn active_thread_view(&self) -> Option<&Entity> { + pub(crate) fn active_thread_view(&self) -> Option<&Entity> { match &self.active_view { ActiveView::AgentThread { server_view, .. 
} => Some(server_view), ActiveView::Uninitialized @@ -924,7 +998,7 @@ impl AgentPanel { } } - fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context) { + pub fn new_thread(&mut self, _action: &NewThread, window: &mut Window, cx: &mut Context) { self.new_agent_thread(AgentType::NativeAgent, window, cx); } @@ -1057,7 +1131,7 @@ impl AgentPanel { let server = ext_agent.server(fs, thread_store); this.update_in(cx, |agent_panel, window, cx| { - agent_panel._external_thread( + agent_panel.create_external_thread( server, resume_thread, initial_content, @@ -1117,10 +1191,7 @@ impl AgentPanel { match self.selected_agent { AgentType::NativeAgent => Some(HistoryKind::AgentThreads), AgentType::TextThread => Some(HistoryKind::TextThreads), - AgentType::Gemini - | AgentType::ClaudeAgent - | AgentType::Codex - | AgentType::Custom { .. } => { + AgentType::Custom { .. } => { if self.acp_history.read(cx).has_session_list() { Some(HistoryKind::AgentThreads) } else { @@ -1477,7 +1548,7 @@ impl AgentPanel { cx.spawn_in(window, async move |this, cx| { thread_store .update(&mut cx.clone(), |store, cx| { - store.save_thread(session_id.clone(), db_thread, cx) + store.save_thread(session_id.clone(), db_thread, Default::default(), cx) }) .await?; @@ -1560,14 +1631,14 @@ impl AgentPanel { } } - pub fn as_active_server_view(&self) -> Option<&Entity> { + pub fn as_active_server_view(&self) -> Option<&Entity> { match &self.active_view { ActiveView::AgentThread { server_view } => Some(server_view), _ => None, } } - pub fn as_active_thread_view(&self, cx: &App) -> Option> { + pub fn as_active_thread_view(&self, cx: &App) -> Option> { let server_view = self.as_active_server_view()?; server_view.read(cx).active_thread().cloned() } @@ -1582,6 +1653,53 @@ impl AgentPanel { } } + /// Returns the primary thread views for all retained connections: the + pub fn is_background_thread(&self, session_id: &acp::SessionId) -> bool { + self.background_threads.contains_key(session_id) 
+ } + + /// active thread plus any background threads that are still running or + /// completed but unseen. + pub fn parent_threads(&self, cx: &App) -> Vec> { + let mut views = Vec::new(); + + if let Some(server_view) = self.as_active_server_view() { + if let Some(thread_view) = server_view.read(cx).parent_thread(cx) { + views.push(thread_view); + } + } + + for server_view in self.background_threads.values() { + if let Some(thread_view) = server_view.read(cx).parent_thread(cx) { + views.push(thread_view); + } + } + + views + } + + fn retain_running_thread(&mut self, old_view: ActiveView, cx: &mut Context) { + let ActiveView::AgentThread { server_view } = old_view else { + return; + }; + + let Some(thread_view) = server_view.read(cx).parent_thread(cx) else { + return; + }; + + let thread = &thread_view.read(cx).thread; + let (status, session_id) = { + let thread = thread.read(cx); + (thread.status(), thread.session_id().clone()) + }; + + if status != ThreadStatus::Generating { + return; + } + + self.background_threads.insert(session_id, server_view); + } + pub(crate) fn active_native_agent_thread(&self, cx: &App) -> Option> { match &self.active_view { ActiveView::AgentThread { server_view, .. 
} => { @@ -1620,29 +1738,45 @@ impl AgentPanel { let current_is_config = matches!(self.active_view, ActiveView::Configuration); let new_is_config = matches!(new_view, ActiveView::Configuration); - let current_is_special = current_is_history || current_is_config; - let new_is_special = new_is_history || new_is_config; + let current_is_overlay = current_is_history || current_is_config; + let new_is_overlay = new_is_history || new_is_config; - if current_is_uninitialized || (current_is_special && !new_is_special) { + if current_is_uninitialized || (current_is_overlay && !new_is_overlay) { self.active_view = new_view; - } else if !current_is_special && new_is_special { + } else if !current_is_overlay && new_is_overlay { self.previous_view = Some(std::mem::replace(&mut self.active_view, new_view)); } else { - if !new_is_special { - self.previous_view = None; + let old_view = std::mem::replace(&mut self.active_view, new_view); + if !new_is_overlay { + if let Some(previous) = self.previous_view.take() { + self.retain_running_thread(previous, cx); + } } - self.active_view = new_view; + self.retain_running_thread(old_view, cx); } + // Subscribe to the active ThreadView's events (e.g. FirstSendRequested) + // so the panel can intercept the first send for worktree creation. + // Re-subscribe whenever the ConnectionView changes, since the inner + // ThreadView may have been replaced (e.g. navigating between threads). 
self._active_view_observation = match &self.active_view { ActiveView::AgentThread { server_view } => { - Some(cx.observe(server_view, |this, _, cx| { - cx.emit(AgentPanelEvent::ActiveViewChanged); - this.serialize(cx); - cx.notify(); - })) + self._thread_view_subscription = + Self::subscribe_to_active_thread_view(server_view, window, cx); + Some( + cx.observe_in(server_view, window, |this, server_view, window, cx| { + this._thread_view_subscription = + Self::subscribe_to_active_thread_view(&server_view, window, cx); + cx.emit(AgentPanelEvent::ActiveViewChanged); + this.serialize(cx); + cx.notify(); + }), + ) + } + _ => { + self._thread_view_subscription = None; + None } - _ => None, }; let is_in_agent_history = matches!( @@ -1756,12 +1890,57 @@ impl AgentPanel { self.selected_agent.clone() } + fn subscribe_to_active_thread_view( + server_view: &Entity, + window: &mut Window, + cx: &mut Context, + ) -> Option { + server_view.read(cx).active_thread().cloned().map(|tv| { + cx.subscribe_in( + &tv, + window, + |this, view, event: &AcpThreadViewEvent, window, cx| match event { + AcpThreadViewEvent::FirstSendRequested { content } => { + this.handle_first_send_requested(view.clone(), content.clone(), window, cx); + } + }, + ) + }) + } + + pub fn start_thread_in(&self) -> &StartThreadIn { + &self.start_thread_in + } + + fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context) { + if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::() { + return; + } + + let new_target = match *action { + StartThreadIn::LocalProject => StartThreadIn::LocalProject, + StartThreadIn::NewWorktree => { + if !self.project_has_git_repository(cx) { + log::error!( + "set_start_thread_in: cannot use NewWorktree without a git repository" + ); + return; + } + if self.project.read(cx).is_via_collab() { + log::error!("set_start_thread_in: cannot use NewWorktree in a collab project"); + return; + } + StartThreadIn::NewWorktree + } + }; + self.start_thread_in = new_target; + 
self.serialize(cx); + cx.notify(); + } + fn selected_external_agent(&self) -> Option { match &self.selected_agent { AgentType::NativeAgent => Some(ExternalAgent::NativeAgent), - AgentType::Gemini => Some(ExternalAgent::Gemini), - AgentType::ClaudeAgent => Some(ExternalAgent::ClaudeCode), - AgentType::Codex => Some(ExternalAgent::Codex), AgentType::Custom { name } => Some(ExternalAgent::Custom { name: name.clone() }), AgentType::TextThread => None, } @@ -1827,25 +2006,6 @@ impl AgentPanel { window, cx, ), - AgentType::Gemini => { - self.external_thread(Some(crate::ExternalAgent::Gemini), None, None, window, cx) - } - AgentType::ClaudeAgent => { - self.selected_agent = AgentType::ClaudeAgent; - self.serialize(cx); - self.external_thread( - Some(crate::ExternalAgent::ClaudeCode), - None, - None, - window, - cx, - ) - } - AgentType::Codex => { - self.selected_agent = AgentType::Codex; - self.serialize(cx); - self.external_thread(Some(crate::ExternalAgent::Codex), None, None, window, cx) - } AgentType::Custom { name } => self.external_thread( Some(crate::ExternalAgent::Custom { name }), None, @@ -1862,13 +2022,43 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { + let session_id = thread.session_id.clone(); + if let Some(server_view) = self.background_threads.remove(&session_id) { + self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx); + return; + } + + if let ActiveView::AgentThread { server_view } = &self.active_view { + if server_view + .read(cx) + .active_thread() + .map(|t| t.read(cx).id.clone()) + == Some(session_id.clone()) + { + return; + } + } + + if let Some(ActiveView::AgentThread { server_view }) = &self.previous_view { + if server_view + .read(cx) + .active_thread() + .map(|t| t.read(cx).id.clone()) + == Some(session_id.clone()) + { + let view = self.previous_view.take().unwrap(); + self.set_active_view(view, true, window, cx); + return; + } + } + let Some(agent) = self.selected_external_agent() else { return; }; 
self.external_thread(Some(agent), Some(thread), None, window, cx); } - fn _external_thread( + pub(crate) fn create_external_thread( &mut self, server: Rc, resume_thread: Option, @@ -1891,7 +2081,7 @@ impl AgentPanel { .then(|| self.thread_store.clone()); let server_view = cx.new(|cx| { - crate::acp::AcpServerView::new( + crate::ConnectionView::new( server, resume_thread, initial_content, @@ -1905,101 +2095,622 @@ impl AgentPanel { ) }); + cx.observe(&server_view, |this, server_view, cx| { + let is_active = this + .as_active_server_view() + .is_some_and(|active| active.entity_id() == server_view.entity_id()); + if is_active { + cx.emit(AgentPanelEvent::ActiveViewChanged); + this.serialize(cx); + } else { + cx.emit(AgentPanelEvent::BackgroundThreadChanged); + } + cx.notify(); + }) + .detach(); + self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx); } -} -impl Focusable for AgentPanel { - fn focus_handle(&self, cx: &App) -> FocusHandle { - match &self.active_view { - ActiveView::Uninitialized => self.focus_handle.clone(), - ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx), - ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => self.acp_history.focus_handle(cx), - HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), - }, - ActiveView::TextThread { - text_thread_editor, .. 
- } => text_thread_editor.focus_handle(cx), - ActiveView::Configuration => { - if let Some(configuration) = self.configuration.as_ref() { - configuration.focus_handle(cx) - } else { - self.focus_handle.clone() - } - } - } + fn active_thread_has_messages(&self, cx: &App) -> bool { + self.active_agent_thread(cx) + .is_some_and(|thread| !thread.read(cx).entries().is_empty()) } -} -fn agent_panel_dock_position(cx: &App) -> DockPosition { - AgentSettings::get_global(cx).dock.into() -} + fn handle_first_send_requested( + &mut self, + thread_view: Entity, + content: Vec, + window: &mut Window, + cx: &mut Context, + ) { + if self.start_thread_in == StartThreadIn::NewWorktree { + self.handle_worktree_creation_requested(content, window, cx); + } else { + cx.defer_in(window, move |_this, window, cx| { + thread_view.update(cx, |thread_view, cx| { + let editor = thread_view.message_editor.clone(); + thread_view.send_impl(editor, window, cx); + }); + }); + } + } -pub enum AgentPanelEvent { - ActiveViewChanged, -} + /// Partitions the project's visible worktrees into git-backed repositories + /// and plain (non-git) paths. Git repos will have worktrees created for + /// them; non-git paths are carried over to the new workspace as-is. + /// + /// When multiple worktrees map to the same repository, the most specific + /// match wins (deepest work directory path), with a deterministic + /// tie-break on entity id. Each repository appears at most once. 
+ fn classify_worktrees( + &self, + cx: &App, + ) -> (Vec>, Vec) { + let project = &self.project; + let repositories = project.read(cx).repositories(cx).clone(); + let mut git_repos: Vec> = Vec::new(); + let mut non_git_paths: Vec = Vec::new(); + let mut seen_repo_ids = std::collections::HashSet::new(); + + for worktree in project.read(cx).visible_worktrees(cx) { + let wt_path = worktree.read(cx).abs_path(); + + let matching_repo = repositories + .iter() + .filter_map(|(id, repo)| { + let work_dir = repo.read(cx).work_directory_abs_path.clone(); + if wt_path.starts_with(work_dir.as_ref()) + || work_dir.starts_with(wt_path.as_ref()) + { + Some((*id, repo.clone(), work_dir.as_ref().components().count())) + } else { + None + } + }) + .max_by( + |(left_id, _left_repo, left_depth), (right_id, _right_repo, right_depth)| { + left_depth + .cmp(right_depth) + .then_with(|| left_id.cmp(right_id)) + }, + ); -impl EventEmitter for AgentPanel {} -impl EventEmitter for AgentPanel {} + if let Some((id, repo, _)) = matching_repo { + if seen_repo_ids.insert(id) { + git_repos.push(repo); + } + } else { + non_git_paths.push(wt_path.to_path_buf()); + } + } -impl Panel for AgentPanel { - fn persistent_name() -> &'static str { - "AgentPanel" + (git_repos, non_git_paths) } - fn panel_key() -> &'static str { - AGENT_PANEL_KEY - } + /// Kicks off an async git-worktree creation for each repository. Returns: + /// + /// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the + /// receiver resolves once the git worktree command finishes. + /// - `path_remapping`: `(old_work_dir, new_worktree_path)` pairs used + /// later to remap open editor tabs into the new workspace. 
+ fn start_worktree_creations( + git_repos: &[Entity], + branch_name: &str, + worktree_directory_setting: &str, + cx: &mut Context, + ) -> Result<( + Vec<( + Entity, + PathBuf, + futures::channel::oneshot::Receiver>, + )>, + Vec<(PathBuf, PathBuf)>, + )> { + let mut creation_infos = Vec::new(); + let mut path_remapping = Vec::new(); + + for repo in git_repos { + let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| { + let original_repo = repo.original_repo_abs_path.clone(); + let directory = + validate_worktree_directory(&original_repo, worktree_directory_setting)?; + let new_path = directory.join(branch_name); + let receiver = repo.create_worktree(branch_name.to_string(), directory, None); + let work_dir = repo.work_directory_abs_path.clone(); + anyhow::Ok((work_dir, new_path, receiver)) + })?; + path_remapping.push((work_dir.to_path_buf(), new_path.clone())); + creation_infos.push((repo.clone(), new_path, receiver)); + } - fn position(&self, _window: &Window, cx: &App) -> DockPosition { - agent_panel_dock_position(cx) + Ok((creation_infos, path_remapping)) } - fn position_is_valid(&self, position: DockPosition) -> bool { - position != DockPosition::Bottom - } + /// Waits for every in-flight worktree creation to complete. If any + /// creation fails, all successfully-created worktrees are rolled back + /// (removed) so the project isn't left in a half-migrated state. 
+ async fn await_and_rollback_on_failure( + creation_infos: Vec<( + Entity, + PathBuf, + futures::channel::oneshot::Receiver>, + )>, + cx: &mut AsyncWindowContext, + ) -> Result> { + let mut created_paths: Vec = Vec::new(); + let mut repos_and_paths: Vec<(Entity, PathBuf)> = + Vec::new(); + let mut first_error: Option = None; + + for (repo, new_path, receiver) in creation_infos { + match receiver.await { + Ok(Ok(())) => { + created_paths.push(new_path.clone()); + repos_and_paths.push((repo, new_path)); + } + Ok(Err(err)) => { + if first_error.is_none() { + first_error = Some(err); + } + } + Err(_canceled) => { + if first_error.is_none() { + first_error = Some(anyhow!("Worktree creation was canceled")); + } + } + } + } - fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) { - settings::update_settings_file(self.fs.clone(), cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_dock(position.into()); - }); - } + let Some(err) = first_error else { + return Ok(created_paths); + }; - fn size(&self, window: &Window, cx: &App) -> Pixels { - let settings = AgentSettings::get_global(cx); - match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => { - self.width.unwrap_or(settings.default_width) + // Rollback all successfully created worktrees + let mut rollback_receivers = Vec::new(); + for (rollback_repo, rollback_path) in &repos_and_paths { + if let Ok(receiver) = cx.update(|_, cx| { + rollback_repo.update(cx, |repo, _cx| { + repo.remove_worktree(rollback_path.clone(), true) + }) + }) { + rollback_receivers.push((rollback_path.clone(), receiver)); } - DockPosition::Bottom => self.height.unwrap_or(settings.default_height), } - } - - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { - match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => self.width = size, - DockPosition::Bottom => self.height = size, + let mut rollback_failures: Vec = 
Vec::new(); + for (path, receiver) in rollback_receivers { + match receiver.await { + Ok(Ok(())) => {} + Ok(Err(rollback_err)) => { + log::error!( + "failed to rollback worktree at {}: {rollback_err}", + path.display() + ); + rollback_failures.push(format!("{}: {rollback_err}", path.display())); + } + Err(rollback_err) => { + log::error!( + "failed to rollback worktree at {}: {rollback_err}", + path.display() + ); + rollback_failures.push(format!("{}: {rollback_err}", path.display())); + } + } } - self.serialize(cx); - cx.notify(); + let mut error_message = format!("Failed to create worktree: {err}"); + if !rollback_failures.is_empty() { + error_message.push_str("\n\nFailed to clean up: "); + error_message.push_str(&rollback_failures.join(", ")); + } + Err(anyhow!(error_message)) } - fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) { - if active && matches!(self.active_view, ActiveView::Uninitialized) { + fn set_worktree_creation_error( + &mut self, + message: SharedString, + window: &mut Window, + cx: &mut Context, + ) { + self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message)); + if matches!(self.active_view, ActiveView::Uninitialized) { let selected_agent = self.selected_agent.clone(); self.new_agent_thread(selected_agent, window, cx); } + cx.notify(); } - fn remote_id() -> Option { - Some(proto::PanelId::AssistantPanel) - } - - fn icon(&self, _window: &Window, cx: &App) -> Option { + fn handle_worktree_creation_requested( + &mut self, + content: Vec, + window: &mut Window, + cx: &mut Context, + ) { + if matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ) { + return; + } + + self.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + cx.notify(); + + let (git_repos, non_git_paths) = self.classify_worktrees(cx); + + if git_repos.is_empty() { + self.set_worktree_creation_error( + "No git repositories found in the project".into(), + window, + cx, + ); + return; + } + 
+ // Kick off branch listing as early as possible so it can run + // concurrently with the remaining synchronous setup work. + let branch_receivers: Vec<_> = git_repos + .iter() + .map(|repo| repo.update(cx, |repo, _cx| repo.branches())) + .collect(); + + let worktree_directory_setting = ProjectSettings::get_global(cx) + .git + .worktree_directory + .clone(); + + let (dock_structure, open_file_paths) = self + .workspace + .upgrade() + .map(|workspace| { + let dock_structure = workspace.read(cx).capture_dock_state(window, cx); + let open_file_paths = workspace.read(cx).open_item_abs_paths(cx); + (dock_structure, open_file_paths) + }) + .unwrap_or_default(); + + let workspace = self.workspace.clone(); + let window_handle = window + .window_handle() + .downcast::(); + + let task = cx.spawn_in(window, async move |this, cx| { + // Await the branch listings we kicked off earlier. + let mut existing_branches = Vec::new(); + for result in futures::future::join_all(branch_receivers).await { + match result { + Ok(Ok(branches)) => { + for branch in branches { + existing_branches.push(branch.name().to_string()); + } + } + Ok(Err(err)) => { + Err::<(), _>(err).log_err(); + } + Err(_) => {} + } + } + + let existing_branch_refs: Vec<&str> = + existing_branches.iter().map(|s| s.as_str()).collect(); + let mut rng = rand::rng(); + let branch_name = + match crate::branch_names::generate_branch_name(&existing_branch_refs, &mut rng) { + Some(name) => name, + None => { + this.update_in(cx, |this, window, cx| { + this.set_worktree_creation_error( + "Failed to generate a branch name: all typewriter names are taken" + .into(), + window, + cx, + ); + })?; + return anyhow::Ok(()); + } + }; + + let (creation_infos, path_remapping) = match this.update_in(cx, |_this, _window, cx| { + Self::start_worktree_creations( + &git_repos, + &branch_name, + &worktree_directory_setting, + cx, + ) + }) { + Ok(Ok(result)) => result, + Ok(Err(err)) | Err(err) => { + this.update_in(cx, |this, window, cx| { + 
this.set_worktree_creation_error( + format!("Failed to validate worktree directory: {err}").into(), + window, + cx, + ); + }) + .log_err(); + return anyhow::Ok(()); + } + }; + + let created_paths = match Self::await_and_rollback_on_failure(creation_infos, cx).await + { + Ok(paths) => paths, + Err(err) => { + this.update_in(cx, |this, window, cx| { + this.set_worktree_creation_error(format!("{err}").into(), window, cx); + })?; + return anyhow::Ok(()); + } + }; + + let mut all_paths = created_paths; + let has_non_git = !non_git_paths.is_empty(); + all_paths.extend(non_git_paths.iter().cloned()); + + let app_state = match workspace.upgrade() { + Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?, + None => { + this.update_in(cx, |this, window, cx| { + this.set_worktree_creation_error( + "Workspace no longer available".into(), + window, + cx, + ); + })?; + return anyhow::Ok(()); + } + }; + + let this_for_error = this.clone(); + if let Err(err) = Self::setup_new_workspace( + this, + all_paths, + app_state, + window_handle, + dock_structure, + open_file_paths, + path_remapping, + non_git_paths, + has_non_git, + content, + cx, + ) + .await + { + this_for_error + .update_in(cx, |this, window, cx| { + this.set_worktree_creation_error( + format!("Failed to set up workspace: {err}").into(), + window, + cx, + ); + }) + .log_err(); + } + anyhow::Ok(()) + }); + + self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move { + task.await.log_err(); + })); + } + + async fn setup_new_workspace( + this: WeakEntity, + all_paths: Vec, + app_state: Arc, + window_handle: Option>, + dock_structure: workspace::DockStructure, + open_file_paths: Vec, + path_remapping: Vec<(PathBuf, PathBuf)>, + non_git_paths: Vec, + has_non_git: bool, + content: Vec, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let init: Option< + Box) + Send>, + > = Some(Box::new(move |workspace, window, cx| { + workspace.set_dock_structure(dock_structure, window, cx); + 
})); + + let (new_window_handle, _) = cx + .update(|_window, cx| { + Workspace::new_local(all_paths, app_state, window_handle, None, init, false, cx) + })? + .await?; + + let new_workspace = new_window_handle.update(cx, |multi_workspace, _window, _cx| { + let workspaces = multi_workspace.workspaces(); + workspaces.last().cloned() + })?; + + let Some(new_workspace) = new_workspace else { + anyhow::bail!("New workspace was not added to MultiWorkspace"); + }; + + let panels_task = new_window_handle.update(cx, |_, _, cx| { + new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task()) + })?; + if let Some(task) = panels_task { + task.await.log_err(); + } + + let initial_content = AgentInitialContent::ContentBlock { + blocks: content, + auto_submit: true, + }; + + new_window_handle.update(cx, |_multi_workspace, window, cx| { + new_workspace.update(cx, |workspace, cx| { + if has_non_git { + let toast_id = workspace::notifications::NotificationId::unique::(); + workspace.show_toast( + workspace::Toast::new( + toast_id, + "Some project folders are not git repositories. 
\ + They were included as-is without creating a worktree.", + ), + cx, + ); + } + + let remapped_paths: Vec = open_file_paths + .iter() + .filter_map(|original_path| { + let best_match = path_remapping + .iter() + .filter_map(|(old_root, new_root)| { + original_path.strip_prefix(old_root).ok().map(|relative| { + (old_root.components().count(), new_root.join(relative)) + }) + }) + .max_by_key(|(depth, _)| *depth); + + if let Some((_, remapped_path)) = best_match { + return Some(remapped_path); + } + + for non_git in &non_git_paths { + if original_path.starts_with(non_git) { + return Some(original_path.clone()); + } + } + None + }) + .collect(); + + if !remapped_paths.is_empty() { + workspace + .open_paths( + remapped_paths, + workspace::OpenOptions::default(), + None, + window, + cx, + ) + .detach(); + } + + workspace.focus_panel::(window, cx); + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.external_thread(None, None, Some(initial_content), window, cx); + }); + } + }); + })?; + + new_window_handle.update(cx, |multi_workspace, _window, cx| { + multi_workspace.activate(new_workspace.clone(), cx); + })?; + + this.update_in(cx, |this, _window, cx| { + this.worktree_creation_status = None; + cx.notify(); + })?; + + anyhow::Ok(()) + } +} + +impl Focusable for AgentPanel { + fn focus_handle(&self, cx: &App) -> FocusHandle { + match &self.active_view { + ActiveView::Uninitialized => self.focus_handle.clone(), + ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx), + ActiveView::History { kind } => match kind { + HistoryKind::AgentThreads => self.acp_history.focus_handle(cx), + HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), + }, + ActiveView::TextThread { + text_thread_editor, .. 
+ } => text_thread_editor.focus_handle(cx), + ActiveView::Configuration => { + if let Some(configuration) = self.configuration.as_ref() { + configuration.focus_handle(cx) + } else { + self.focus_handle.clone() + } + } + } + } +} + +fn agent_panel_dock_position(cx: &App) -> DockPosition { + AgentSettings::get_global(cx).dock.into() +} + +pub enum AgentPanelEvent { + ActiveViewChanged, + BackgroundThreadChanged, +} + +impl EventEmitter for AgentPanel {} +impl EventEmitter for AgentPanel {} + +impl Panel for AgentPanel { + fn persistent_name() -> &'static str { + "AgentPanel" + } + + fn panel_key() -> &'static str { + AGENT_PANEL_KEY + } + + fn position(&self, _window: &Window, cx: &App) -> DockPosition { + agent_panel_dock_position(cx) + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + position != DockPosition::Bottom + } + + fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) { + settings::update_settings_file(self.fs.clone(), cx, move |settings, _| { + settings + .agent + .get_or_insert_default() + .set_dock(position.into()); + }); + } + + fn size(&self, window: &Window, cx: &App) -> Pixels { + let settings = AgentSettings::get_global(cx); + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => { + self.width.unwrap_or(settings.default_width) + } + DockPosition::Bottom => self.height.unwrap_or(settings.default_height), + } + } + + fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => self.width = size, + DockPosition::Bottom => self.height = size, + } + self.serialize(cx); + cx.notify(); + } + + fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) { + if active + && matches!(self.active_view, ActiveView::Uninitialized) + && !matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ) + { + let selected_agent = 
self.selected_agent.clone(); + self.new_agent_thread(selected_agent, window, cx); + } + } + + fn remote_id() -> Option { + Some(proto::PanelId::AssistantPanel) + } + + fn icon(&self, _window: &Window, cx: &App) -> Option { (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant) } @@ -2164,7 +2875,7 @@ impl AgentPanel { .into_any() } - fn handle_regenerate_thread_title(thread_view: Entity, cx: &mut App) { + fn handle_regenerate_thread_title(thread_view: Entity, cx: &mut App) { thread_view.update(cx, |thread_view, cx| { if let Some(thread) = thread_view.as_native_thread(cx) { thread.update(cx, |thread, cx| { @@ -2196,8 +2907,6 @@ impl AgentPanel { "Enable Full Screen" }; - let selected_agent = self.selected_agent.clone(); - let text_thread_view = match &self.active_view { ActiveView::TextThread { text_thread_editor, .. @@ -2226,6 +2935,10 @@ impl AgentPanel { } _ => false, }; + let has_auth_methods = match &self.active_view { + ActiveView::AgentThread { server_view } => server_view.read(cx).has_auth_methods(), + _ => false, + }; PopoverMenu::new("agent-options-menu") .trigger_with_tooltip( @@ -2301,7 +3014,7 @@ impl AgentPanel { .separator() .action(full_screen_label, Box::new(ToggleZoom)); - if selected_agent == AgentType::Gemini { + if has_auth_methods { menu = menu.action("Reauthenticate", Box::new(ReauthenticateAgent)) } @@ -2367,6 +3080,99 @@ impl AgentPanel { }) } + fn project_has_git_repository(&self, cx: &App) -> bool { + !self.project.read(cx).repositories(cx).is_empty() + } + + fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement { + let has_git_repo = self.project_has_git_repository(cx); + let is_via_collab = self.project.read(cx).is_via_collab(); + + let is_creating = matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ); + + let current_target = self.start_thread_in; + let trigger_label = self.start_thread_in.label(); + + let icon = if 
self.start_thread_in_menu_handle.is_deployed() { + IconName::ChevronUp + } else { + IconName::ChevronDown + }; + + let trigger_button = Button::new("thread-target-trigger", trigger_label) + .label_size(LabelSize::Small) + .color(Color::Muted) + .icon(icon) + .icon_size(IconSize::XSmall) + .icon_position(IconPosition::End) + .icon_color(Color::Muted) + .disabled(is_creating); + + let dock_position = AgentSettings::get_global(cx).dock; + let documentation_side = match dock_position { + settings::DockPosition::Left => DocumentationSide::Right, + settings::DockPosition::Bottom | settings::DockPosition::Right => { + DocumentationSide::Left + } + }; + + PopoverMenu::new("thread-target-selector") + .trigger(trigger_button) + .anchor(gpui::Corner::BottomRight) + .with_handle(self.start_thread_in_menu_handle.clone()) + .menu(move |window, cx| { + let current_target = current_target; + Some(ContextMenu::build(window, cx, move |menu, _window, _cx| { + let is_local_selected = current_target == StartThreadIn::LocalProject; + let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree; + + let new_worktree_disabled = !has_git_repo || is_via_collab; + + menu.header("Start Thread In…") + .item( + ContextMenuEntry::new("Local Project") + .icon(StartThreadIn::LocalProject.icon()) + .icon_color(Color::Muted) + .toggleable(IconPosition::End, is_local_selected) + .handler(|window, cx| { + window + .dispatch_action(Box::new(StartThreadIn::LocalProject), cx); + }), + ) + .item({ + let entry = ContextMenuEntry::new("New Worktree") + .icon(StartThreadIn::NewWorktree.icon()) + .icon_color(Color::Muted) + .toggleable(IconPosition::End, is_new_worktree_selected) + .disabled(new_worktree_disabled) + .handler(|window, cx| { + window + .dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + }); + + if new_worktree_disabled { + entry.documentation_aside(documentation_side, move |_| { + let reason = if !has_git_repo { + "No git repository found in this project." 
+ } else { + "Not available for remote/collab projects yet." + }; + Label::new(reason) + .color(Color::Muted) + .size(LabelSize::Small) + .into_any_element() + }) + } else { + entry + } + }) + })) + }) + } + fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let agent_server_store = self.project.read(cx).agent_server_store().clone(); let focus_handle = self.focus_handle(cx); @@ -2510,140 +3316,73 @@ impl AgentPanel { ) .separator() .header("External Agents") - .item( - ContextMenuEntry::new("Claude Agent") - .when(is_agent_selected(AgentType::ClaudeAgent), |this| { - this.action(Box::new(NewExternalAgentThread { - agent: None, - })) + .map(|mut menu| { + let agent_server_store = agent_server_store.read(cx); + let registry_store = + project::AgentRegistryStore::try_global(cx); + let registry_store_ref = + registry_store.as_ref().map(|s| s.read(cx)); + + struct AgentMenuItem { + id: ExternalAgentServerName, + display_name: SharedString, + } + + let agent_items = agent_server_store + .external_agents() + .map(|name| { + let display_name = agent_server_store + .agent_display_name(name) + .or_else(|| { + registry_store_ref + .as_ref() + .and_then(|store| store.agent(name.0.as_ref())) + .map(|a| a.name().clone()) + }) + .unwrap_or_else(|| name.0.clone()); + AgentMenuItem { + id: name.clone(), + display_name, + } }) - .icon(IconName::AiClaude) - .disabled(is_via_collab) - .icon_color(Color::Muted) - .handler({ - let workspace = workspace.clone(); - move |window, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::ClaudeAgent, - window, - cx, - ); - }); - } - }); - } - } - }), - ) - .item( - ContextMenuEntry::new("Codex CLI") - .when(is_agent_selected(AgentType::Codex), |this| { - this.action(Box::new(NewExternalAgentThread { - agent: None, - })) - }) - 
.icon(IconName::AiOpenAi) - .disabled(is_via_collab) - .icon_color(Color::Muted) - .handler({ - let workspace = workspace.clone(); - move |window, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::Codex, - window, - cx, - ); - }); - } - }); - } - } - }), - ) - .item( - ContextMenuEntry::new("Gemini CLI") - .when(is_agent_selected(AgentType::Gemini), |this| { - this.action(Box::new(NewExternalAgentThread { - agent: None, - })) - }) - .icon(IconName::AiGemini) - .icon_color(Color::Muted) - .disabled(is_via_collab) - .handler({ - let workspace = workspace.clone(); - move |window, cx| { - if let Some(workspace) = workspace.upgrade() { - workspace.update(cx, |workspace, cx| { - if let Some(panel) = - workspace.panel::(cx) - { - panel.update(cx, |panel, cx| { - panel.new_agent_thread( - AgentType::Gemini, - window, - cx, - ); - }); - } - }); - } - } - }), - ) - .map(|mut menu| { - let agent_server_store = agent_server_store.read(cx); - let agent_names = agent_server_store - .external_agents() - .filter(|name| { - name.0 != GEMINI_NAME - && name.0 != CLAUDE_AGENT_NAME - && name.0 != CODEX_NAME - }) - .cloned() + .sorted_unstable_by_key(|e| e.display_name.to_lowercase()) .collect::>(); - for agent_name in agent_names { - let icon_path = agent_server_store.agent_icon(&agent_name); - let display_name = agent_server_store - .agent_display_name(&agent_name) - .unwrap_or_else(|| agent_name.0.clone()); - - let mut entry = ContextMenuEntry::new(display_name); + for item in &agent_items { + let mut entry = + ContextMenuEntry::new(item.display_name.clone()); + + let icon_path = agent_server_store + .agent_icon(&item.id) + .or_else(|| { + registry_store_ref + .as_ref() + .and_then(|store| store.agent(item.id.0.as_str())) + .and_then(|a| a.icon_path().cloned()) + }); if let Some(icon_path) = icon_path { entry = 
entry.custom_icon_svg(icon_path); } else { entry = entry.icon(IconName::Sparkle); } + entry = entry .when( is_agent_selected(AgentType::Custom { - name: agent_name.0.clone(), + name: item.id.0.clone(), }), |this| { - this.action(Box::new(NewExternalAgentThread { - agent: None, - })) + this.action(Box::new( + NewExternalAgentThread { agent: None }, + )) }, ) .icon_color(Color::Muted) .disabled(is_via_collab) .handler({ let workspace = workspace.clone(); - let agent_name = agent_name.clone(); + let agent_id = item.id.clone(); move |window, cx| { if let Some(workspace) = workspace.upgrade() { workspace.update(cx, |workspace, cx| { @@ -2653,9 +3392,7 @@ impl AgentPanel { panel.update(cx, |panel, cx| { panel.new_agent_thread( AgentType::Custom { - name: agent_name - .clone() - .into(), + name: agent_id.0.clone(), }, window, cx, @@ -2673,6 +3410,101 @@ impl AgentPanel { menu }) .separator() + .map(|mut menu| { + let agent_server_store = agent_server_store.read(cx); + let registry_store = + project::AgentRegistryStore::try_global(cx); + let registry_store_ref = + registry_store.as_ref().map(|s| s.read(cx)); + + let previous_built_in_ids: &[ExternalAgentServerName] = + &[CLAUDE_AGENT_NAME.into(), CODEX_NAME.into(), GEMINI_NAME.into()]; + + let promoted_items = previous_built_in_ids + .iter() + .filter(|id| { + !agent_server_store.external_agents.contains_key(*id) + }) + .filter_map(|name| { + let display_name = registry_store_ref + .as_ref() + .and_then(|store| store.agent(name.0.as_ref())) + .map(|a| a.name().clone())?; + Some((name.clone(), display_name)) + }) + .sorted_unstable_by_key(|(_, display_name)| display_name.to_lowercase()) + .collect::>(); + + for (agent_id, display_name) in &promoted_items { + let mut entry = + ContextMenuEntry::new(display_name.clone()); + + let icon_path = registry_store_ref + .as_ref() + .and_then(|store| store.agent(agent_id.0.as_str())) + .and_then(|a| a.icon_path().cloned()); + + if let Some(icon_path) = icon_path { + entry = 
entry.custom_icon_svg(icon_path); + } else { + entry = entry.icon(IconName::Sparkle); + } + + entry = entry + .icon_color(Color::Muted) + .disabled(is_via_collab) + .handler({ + let workspace = workspace.clone(); + let agent_id = agent_id.clone(); + move |window, cx| { + let fs = ::global(cx); + let agent_id_string = + agent_id.to_string(); + settings::update_settings_file( + fs, + cx, + move |settings, _| { + let agent_servers = settings + .agent_servers + .get_or_insert_default(); + agent_servers.entry(agent_id_string).or_insert_with(|| { + settings::CustomAgentServerSettings::Registry { + default_mode: None, + default_model: None, + env: Default::default(), + favorite_models: Vec::new(), + default_config_options: Default::default(), + favorite_config_option_values: Default::default(), + } + }); + }, + ); + + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.new_agent_thread( + AgentType::Custom { + name: agent_id.0.clone(), + }, + window, + cx, + ); + }); + } + }); + } + } + }); + + menu = menu.item(entry); + } + + menu + }) .item( ContextMenuEntry::new("Add More Agents") .icon(IconName::Plus) @@ -2727,6 +3559,7 @@ impl AgentPanel { }; let show_history_menu = self.history_kind_for_selected_agent(cx).is_some(); + let has_v2_flag = cx.has_flag::(); h_flex() .id("agent-panel-toolbar") @@ -2757,6 +3590,10 @@ impl AgentPanel { .gap(DynamicSpacing::Base02.rems(cx)) .pl(DynamicSpacing::Base04.rems(cx)) .pr(DynamicSpacing::Base06.rems(cx)) + .when( + has_v2_flag && !self.active_thread_has_messages(cx), + |this| this.child(self.render_start_thread_in_selector(cx)), + ) .child(new_thread_menu) .when(show_history_menu, |this| { this.child(self.render_recent_entries_menu( @@ -2769,6 +3606,51 @@ impl AgentPanel { ) } + fn render_worktree_creation_status(&self, cx: &mut Context) -> Option { + let status = self.worktree_creation_status.as_ref()?; + 
match status { + WorktreeCreationStatus::Creating => Some( + h_flex() + .w_full() + .px(DynamicSpacing::Base06.rems(cx)) + .py(DynamicSpacing::Base02.rems(cx)) + .gap_2() + .bg(cx.theme().colors().surface_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child(SpinnerLabel::new().size(LabelSize::Small)) + .child( + Label::new("Creating worktree…") + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element(), + ), + WorktreeCreationStatus::Error(message) => Some( + h_flex() + .w_full() + .px(DynamicSpacing::Base06.rems(cx)) + .py(DynamicSpacing::Base02.rems(cx)) + .gap_2() + .bg(cx.theme().colors().surface_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child( + Label::new(message.clone()) + .color(Color::Warning) + .size(LabelSize::Small) + .truncate(), + ) + .into_any_element(), + ), + } + } + fn should_render_trial_end_upsell(&self, cx: &mut Context) -> bool { if TrialEndUpsell::dismissed() { return false; @@ -3200,6 +4082,7 @@ impl Render for AgentPanel { } })) .child(self.render_toolbar(window, cx)) + .children(self.render_worktree_creation_status(cx)) .children(self.render_workspace_trust_message(cx)) .children(self.render_onboarding(window, cx)) .map(|parent| { @@ -3447,6 +4330,15 @@ impl Dismissable for TrialEndUpsell { /// Test-only helper methods #[cfg(any(test, feature = "test-support"))] impl AgentPanel { + pub fn test_new( + workspace: &Workspace, + text_thread_store: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + Self::new(workspace, text_thread_store, None, window, cx) + } + /// Opens an external thread using an arbitrary AgentServer. 
/// /// This is a test-only helper that allows visual tests and integration tests @@ -3465,7 +4357,7 @@ impl AgentPanel { name: server.name(), }; - self._external_thread( + self.create_external_thread( server, None, None, workspace, project, ext_agent, window, cx, ); } @@ -3474,20 +4366,78 @@ impl AgentPanel { /// /// This is a test-only accessor that exposes the private `active_thread_view()` /// method for test assertions. Not compiled into production builds. - pub fn active_thread_view_for_tests(&self) -> Option<&Entity> { + pub fn active_thread_view_for_tests(&self) -> Option<&Entity> { self.active_thread_view() } + + /// Sets the start_thread_in value directly, bypassing validation. + /// + /// This is a test-only helper for visual tests that need to show specific + /// start_thread_in states without requiring a real git repository. + pub fn set_start_thread_in_for_tests(&mut self, target: StartThreadIn, cx: &mut Context) { + self.start_thread_in = target; + cx.notify(); + } + + /// Returns the current worktree creation status. + /// + /// This is a test-only helper for visual tests. + pub fn worktree_creation_status_for_tests(&self) -> Option<&WorktreeCreationStatus> { + self.worktree_creation_status.as_ref() + } + + /// Sets the worktree creation status directly. + /// + /// This is a test-only helper for visual tests that need to show the + /// "Creating worktree…" spinner or error banners. + pub fn set_worktree_creation_status_for_tests( + &mut self, + status: Option, + cx: &mut Context, + ) { + self.worktree_creation_status = status; + cx.notify(); + } + + /// Opens the history view. + /// + /// This is a test-only helper that exposes the private `open_history()` + /// method for visual tests. + pub fn open_history_for_tests(&mut self, window: &mut Window, cx: &mut Context) { + self.open_history(window, cx); + } + + /// Opens the start_thread_in selector popover menu. + /// + /// This is a test-only helper for visual tests. 
+ pub fn open_start_thread_in_menu_for_tests( + &mut self, + window: &mut Window, + cx: &mut Context, + ) { + self.start_thread_in_menu_handle.show(window, cx); + } + + /// Dismisses the start_thread_in dropdown menu. + /// + /// This is a test-only helper for visual tests. + pub fn close_start_thread_in_menu_for_tests(&mut self, cx: &mut Context) { + self.start_thread_in_menu_handle.hide(cx); + } } #[cfg(test)] mod tests { use super::*; - use crate::acp::thread_view::tests::{StubAgentServer, init_test}; + use crate::connection_view::tests::{StubAgentServer, init_test}; + use crate::test_support::{active_session_id, open_thread_with_connection, send_message}; + use acp_thread::{StubAgentConnection, ThreadStatus}; use assistant_text_thread::TextThreadStore; use feature_flags::FeatureFlagAppExt; use fs::FakeFs; use gpui::{TestAppContext, VisualTestContext}; use project::Project; + use serde_json::json; use workspace::MultiWorkspace; #[gpui::test] @@ -3565,7 +4515,9 @@ mod tests { panel_b.update(cx, |panel, _cx| { panel.width = Some(px(400.0)); - panel.selected_agent = AgentType::ClaudeAgent; + panel.selected_agent = AgentType::Custom { + name: "claude-acp".into(), + }; }); // --- Serialize both panels --- @@ -3588,9 +4540,7 @@ mod tests { .expect("panel B load should succeed"); cx.run_until_parked(); - // Workspace A should restore width and agent type, but the thread - // should NOT be restored because the stub agent never persisted it - // to the database (the load-side validation skips missing threads). 
+ // Workspace A should restore its thread, width, and agent type loaded_a.read_with(cx, |panel, _cx| { assert_eq!( panel.width, @@ -3601,6 +4551,10 @@ mod tests { panel.selected_agent, agent_type_a, "workspace A agent type should be restored" ); + assert!( + panel.active_thread_view().is_some(), + "workspace A should have its active thread restored" + ); }); // Workspace B should restore its own width and agent type, with no thread @@ -3612,7 +4566,9 @@ mod tests { ); assert_eq!( panel.selected_agent, - AgentType::ClaudeAgent, + AgentType::Custom { + name: "claude-acp".into() + }, "workspace B agent type should be restored" ); assert!( @@ -3668,4 +4624,462 @@ mod tests { cx.run_until_parked(); } + + async fn setup_panel(cx: &mut TestAppContext) -> (Entity, VisualTestContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |mw, _cx| mw.workspace().clone()) + .unwrap(); + + let mut cx = VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(&mut cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)) + }); + + (panel, cx) + } + + #[gpui::test] + async fn test_running_thread_retained_when_navigating_away(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), &mut cx); + send_message(&panel, &mut cx); + + let session_id_a = active_session_id(&panel, &cx); + + // Send a 
chunk to keep thread A generating (don't end the turn). + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Verify thread A is generating. + panel.read_with(&cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + assert_eq!(thread.read(cx).status(), ThreadStatus::Generating); + assert!(panel.background_threads.is_empty()); + }); + + // Open a new thread B — thread A should be retained in background. + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!( + panel.background_threads.len(), + 1, + "Running thread A should be retained in background_views" + ); + assert!( + panel.background_threads.contains_key(&session_id_a), + "Background view should be keyed by thread A's session ID" + ); + }); + } + + #[gpui::test] + async fn test_idle_thread_dropped_when_navigating_away(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Response".into()), + )]); + open_thread_with_connection(&panel, connection_a, &mut cx); + send_message(&panel, &mut cx); + + let weak_view_a = panel.read_with(&cx, |panel, _cx| { + panel.active_thread_view().unwrap().downgrade() + }); + + // Thread A should be idle (auto-completed via set_next_prompt_updates). + panel.read_with(&cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + assert_eq!(thread.read(cx).status(), ThreadStatus::Idle); + }); + + // Open a new thread B — thread A should NOT be retained. 
+ let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + panel.read_with(&cx, |panel, _cx| { + assert!( + panel.background_threads.is_empty(), + "Idle thread A should not be retained in background_views" + ); + }); + + // Verify the old ConnectionView entity was dropped (no strong references remain). + assert!( + weak_view_a.upgrade().is_none(), + "Idle ConnectionView should have been dropped" + ); + } + + #[gpui::test] + async fn test_background_thread_promoted_via_load(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), &mut cx); + send_message(&panel, &mut cx); + + let session_id_a = active_session_id(&panel, &cx); + + // Keep thread A generating. + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Open thread B — thread A goes to background. + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + let session_id_b = active_session_id(&panel, &cx); + + panel.read_with(&cx, |panel, _cx| { + assert_eq!(panel.background_threads.len(), 1); + assert!(panel.background_threads.contains_key(&session_id_a)); + }); + + // Load thread A back via load_agent_thread — should promote from background. + panel.update_in(&mut cx, |panel, window, cx| { + panel.load_agent_thread( + AgentSessionInfo { + session_id: session_id_a.clone(), + cwd: None, + title: None, + updated_at: None, + meta: None, + }, + window, + cx, + ); + }); + + // Thread A should now be the active view, promoted from background. 
+ let active_session = active_session_id(&panel, &cx); + assert_eq!( + active_session, session_id_a, + "Thread A should be the active thread after promotion" + ); + + panel.read_with(&cx, |panel, _cx| { + assert!( + !panel.background_threads.contains_key(&session_id_a), + "Promoted thread A should no longer be in background_views" + ); + assert!( + !panel.background_threads.contains_key(&session_id_b), + "Thread B (idle) should not have been retained in background_views" + ); + }); + } + + #[gpui::test] + async fn test_thread_target_local_project(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.set_branch_name(Path::new("/project/.git"), Some("main")); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Wait for the project to discover the git repository. + cx.run_until_parked(); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Default thread target should be LocalProject. 
+ panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::LocalProject, + "default thread target should be LocalProject" + ); + }); + + // Start a new thread with the default LocalProject target. + // Use StubAgentServer so the thread connects immediately in tests. + panel.update_in(cx, |panel, window, cx| { + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::default_response()), + window, + cx, + ); + }); + + cx.run_until_parked(); + + // MultiWorkspace should still have exactly one workspace (no worktree created). + multi_workspace + .read_with(cx, |multi_workspace, _cx| { + assert_eq!( + multi_workspace.workspaces().len(), + 1, + "LocalProject should not create a new workspace" + ); + }) + .unwrap(); + + // The thread should be active in the panel. + panel.read_with(cx, |panel, cx| { + assert!( + panel.active_agent_thread(cx).is_some(), + "a thread should be running in the current workspace" + ); + }); + + // The thread target should still be LocalProject (unchanged). + panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::LocalProject, + "thread target should remain LocalProject" + ); + }); + + // No worktree creation status should be set. 
+ panel.read_with(cx, |panel, _cx| { + assert!( + panel.worktree_creation_status.is_none(), + "no worktree creation should have occurred" + ); + }); + } + + #[gpui::test] + async fn test_thread_target_serialization_round_trip(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.set_branch_name(Path::new("/project/.git"), Some("main")); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Wait for the project to discover the git repository. + cx.run_until_parked(); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Default should be LocalProject. + panel.read_with(cx, |panel, _cx| { + assert_eq!(*panel.start_thread_in(), StartThreadIn::LocalProject); + }); + + // Change thread target to NewWorktree. 
+ panel.update(cx, |panel, cx| { + panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx); + }); + + panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::NewWorktree, + "thread target should be NewWorktree after set_thread_target" + ); + }); + + // Let serialization complete. + cx.run_until_parked(); + + // Load a fresh panel from the serialized data. + let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap()); + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded_panel = + AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx) + .await + .expect("panel load should succeed"); + cx.run_until_parked(); + + loaded_panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::NewWorktree, + "thread target should survive serialization round-trip" + ); + }); + } + + #[gpui::test] + async fn test_set_active_blocked_during_worktree_creation(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, 
text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Simulate worktree creation in progress and reset to Uninitialized + panel.update_in(cx, |panel, window, cx| { + panel.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + panel.active_view = ActiveView::Uninitialized; + Panel::set_active(panel, true, window, cx); + assert!( + matches!(panel.active_view, ActiveView::Uninitialized), + "set_active should not create a thread while worktree is being created" + ); + }); + + // Clear the creation status and use open_external_thread_with_server + // (which bypasses new_agent_thread) to verify the panel can transition + // out of Uninitialized. We can't call set_active directly because + // new_agent_thread requires full agent server infrastructure. + panel.update_in(cx, |panel, window, cx| { + panel.worktree_creation_status = None; + panel.active_view = ActiveView::Uninitialized; + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::default_response()), + window, + cx, + ); + }); + + cx.run_until_parked(); + + panel.read_with(cx, |panel, _cx| { + assert!( + !matches!(panel.active_view, ActiveView::Uninitialized), + "panel should transition out of Uninitialized once worktree creation is cleared" + ); + }); + } } diff --git a/crates/agent_ui/src/agent_registry_ui.rs b/crates/agent_ui/src/agent_registry_ui.rs index 77539dd7c7deac569e63eeebae90d85d9da80131..d003ba958276c8c2370011d83028eda2e9121440 100644 --- a/crates/agent_ui/src/agent_registry_ui.rs +++ b/crates/agent_ui/src/agent_registry_ui.rs @@ -1,6 +1,4 @@ -use std::collections::{BTreeMap, BTreeSet}; use std::ops::Range; -use std::sync::OnceLock; use client::zed_urls; use collections::HashMap; @@ -16,7 +14,7 @@ use project::{AgentRegistryStore, RegistryAgent}; use settings::{Settings, SettingsStore, update_settings_file}; use theme::ThemeSettings; use ui::{ - Banner, ButtonStyle, ScrollableHandle, 
Severity, ToggleButtonGroup, ToggleButtonGroupSize, + ButtonStyle, ScrollableHandle, ToggleButtonGroup, ToggleButtonGroupSize, ToggleButtonGroupStyle, ToggleButtonSimple, Tooltip, WithScrollbar, prelude::*, }; use workspace::{ @@ -24,10 +22,6 @@ use workspace::{ item::{Item, ItemEvent}, }; -/// Registry IDs for built-in agents that Zed already provides first-class support for. -/// These are filtered out of the ACP Agent Registry UI to avoid showing duplicates. -const BUILT_IN_REGISTRY_IDS: [&str; 4] = ["claude-acp", "claude-code-acp", "codex-acp", "gemini"]; - #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum RegistryFilter { All, @@ -43,28 +37,6 @@ enum RegistryInstallStatus { InstalledExtension, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -enum BuiltInAgent { - Claude, - Codex, - Gemini, -} - -fn keywords_by_agent_feature() -> &'static BTreeMap> { - static KEYWORDS_BY_FEATURE: OnceLock>> = - OnceLock::new(); - KEYWORDS_BY_FEATURE.get_or_init(|| { - BTreeMap::from_iter([ - ( - BuiltInAgent::Claude, - vec!["claude", "claude code", "claude agent"], - ), - (BuiltInAgent::Codex, vec!["codex", "codex cli"]), - (BuiltInAgent::Gemini, vec!["gemini", "gemini cli"]), - ]) - }) -} - #[derive(IntoElement)] struct AgentRegistryCard { children: Vec, @@ -110,7 +82,6 @@ pub struct AgentRegistryPage { installed_statuses: HashMap, query_editor: Entity, filter: RegistryFilter, - upsells: BTreeSet, _subscriptions: Vec, } @@ -145,7 +116,6 @@ impl AgentRegistryPage { installed_statuses: HashMap::default(), query_editor, filter: RegistryFilter::All, - upsells: BTreeSet::new(), _subscriptions: subscriptions, }; @@ -162,8 +132,14 @@ impl AgentRegistryPage { self.registry_agents.sort_by(|left, right| { left.name() .as_ref() - .cmp(right.name().as_ref()) - .then_with(|| left.id().as_ref().cmp(right.id().as_ref())) + .to_lowercase() + .cmp(&right.name().as_ref().to_lowercase()) + .then_with(|| { + left.id() + .as_ref() + .to_lowercase() + 
.cmp(&right.id().as_ref().to_lowercase()) + }) }); self.filter_registry_agents(cx); } @@ -173,7 +149,7 @@ impl AgentRegistryPage { .global::() .get::(None); self.installed_statuses.clear(); - for (id, settings) in &settings.custom { + for (id, settings) in settings.iter() { let status = match settings { CustomAgentServerSettings::Registry { .. } => { RegistryInstallStatus::InstalledRegistry @@ -205,7 +181,6 @@ impl AgentRegistryPage { fn filter_registry_agents(&mut self, cx: &mut Context) { self.refresh_installed_statuses(cx); - self.refresh_feature_upsells(cx); let search = self.search_query(cx).map(|search| search.to_lowercase()); let filter = self.filter; let installed_statuses = self.installed_statuses.clone(); @@ -215,12 +190,6 @@ impl AgentRegistryPage { .iter() .enumerate() .filter(|(_, agent)| { - // Filter out built-in agents since they already appear in the main - // agent configuration UI and don't need to be installed from the registry. - if BUILT_IN_REGISTRY_IDS.contains(&agent.id().as_ref()) { - return false; - } - let matches_search = search.as_ref().is_none_or(|query| { let query = query.as_str(); agent.id().as_ref().to_lowercase().contains(query) @@ -269,83 +238,6 @@ impl AgentRegistryPage { } } - fn refresh_feature_upsells(&mut self, cx: &mut Context) { - let Some(search) = self.search_query(cx) else { - self.upsells.clear(); - return; - }; - - let search = search.to_lowercase(); - let search_terms = search - .split_whitespace() - .map(|term| term.trim()) - .collect::>(); - - for (feature, keywords) in keywords_by_agent_feature() { - if keywords - .iter() - .any(|keyword| search_terms.contains(keyword)) - { - self.upsells.insert(*feature); - } else { - self.upsells.remove(feature); - } - } - } - - fn render_feature_upsell_banner( - &self, - label: SharedString, - docs_url: SharedString, - ) -> impl IntoElement { - let docs_url_button = Button::new("open_docs", "View Documentation") - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::Small) - 
.icon_position(IconPosition::End) - .icon_color(Color::Muted) - .on_click({ - move |_event, _window, cx| { - telemetry::event!( - "Documentation Viewed", - source = "Agent Registry Feature Upsell", - url = docs_url, - ); - cx.open_url(&docs_url) - } - }); - - div().pt_4().px_4().child( - Banner::new() - .severity(Severity::Success) - .child(Label::new(label).mt_0p5()) - .action_slot(docs_url_button), - ) - } - - fn render_feature_upsells(&self) -> impl IntoElement { - let mut container = v_flex(); - - for feature in &self.upsells { - let banner = match feature { - BuiltInAgent::Claude => self.render_feature_upsell_banner( - "Claude Agent support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#claude-agent".into(), - ), - BuiltInAgent::Codex => self.render_feature_upsell_banner( - "Codex CLI support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#codex-cli".into(), - ), - BuiltInAgent::Gemini => self.render_feature_upsell_banner( - "Gemini CLI support is built-in to Zed!".into(), - "https://zed.dev/docs/ai/external-agents#gemini-cli".into(), - ), - }; - container = container.child(banner); - } - - container - } - fn render_search(&self, cx: &mut Context) -> Div { let mut key_context = KeyContext::new_with_defaults(); key_context.add("BufferSearchBar"); @@ -583,7 +475,7 @@ impl AgentRegistryPage { let agent_id = agent_id.clone(); update_settings_file(fs.clone(), cx, move |settings, _| { let agent_servers = settings.agent_servers.get_or_insert_default(); - agent_servers.custom.entry(agent_id).or_insert_with(|| { + agent_servers.entry(agent_id).or_insert_with(|| { settings::CustomAgentServerSettings::Registry { default_mode: None, default_model: None, @@ -607,13 +499,13 @@ impl AgentRegistryPage { let Some(agent_servers) = settings.agent_servers.as_mut() else { return; }; - if let Some(entry) = agent_servers.custom.get(agent_id.as_str()) + if let Some(entry) = agent_servers.get(agent_id.as_str()) && matches!( entry, 
settings::CustomAgentServerSettings::Registry { .. } ) { - agent_servers.custom.remove(agent_id.as_str()); + agent_servers.remove(agent_id.as_str()); } }); }) @@ -708,14 +600,10 @@ impl Render for AgentRegistryPage { ), ), ) - .child(self.render_feature_upsells()) .child(v_flex().px_4().size_full().overflow_y_hidden().map(|this| { let count = self.filtered_registry_indices.len(); - let has_upsells = !self.upsells.is_empty(); - if count == 0 && !has_upsells { + if count == 0 { this.child(self.render_empty_state(cx)).into_any_element() - } else if count == 0 { - this.into_any_element() } else { let scroll_handle = &self.list; this.child( diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 3f082e86b1f4f8e9ea601ec0de15b22a972c1d67..caecce3d0282e33daf8164fb17f48bd53be60b9f 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -1,31 +1,40 @@ -pub mod acp; mod agent_configuration; mod agent_diff; mod agent_model_selector; mod agent_panel; mod agent_registry_ui; +mod branch_names; mod buffer_codegen; mod completion_provider; +mod config_options; +pub(crate) mod connection_view; mod context; mod context_server_configuration; +mod entry_view_state; mod favorite_models; mod inline_assistant; mod inline_prompt_editor; mod language_model_selector; mod mention_set; +mod message_editor; +mod mode_selector; +mod model_selector; +mod model_selector_popover; mod profile_selector; mod slash_command; mod slash_command_picker; mod terminal_codegen; mod terminal_inline_assistant; +#[cfg(any(test, feature = "test-support"))] +pub mod test_support; mod text_thread_editor; mod text_thread_history; +mod thread_history; mod ui; use std::rc::Rc; use std::sync::Arc; -// Another comment use agent_settings::{AgentProfileId, AgentSettings}; use assistant_slash_command::SlashCommandRegistry; use client::Client; @@ -49,11 +58,18 @@ use std::any::TypeId; use workspace::Workspace; use 
crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; -pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate}; +pub use crate::agent_panel::{ + AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate, WorktreeCreationStatus, +}; use crate::agent_registry_ui::AgentRegistryPage; pub use crate::inline_assistant::InlineAssistant; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; +pub(crate) use connection_view::ConnectionView; +pub(crate) use mode_selector::ModeSelector; +pub(crate) use model_selector::ModelSelector; +pub(crate) use model_selector_popover::ModelSelectorPopover; pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor}; +pub(crate) use thread_history::*; use zed_actions; actions!( @@ -149,6 +165,8 @@ actions!( CycleThinkingEffort, /// Toggles the thinking effort selector menu open or closed. ToggleThinkingEffortMenu, + /// Toggles fast mode for models that support it. + ToggleFastMode, ] ); @@ -166,18 +184,6 @@ pub struct AuthorizeToolCall { pub option_kind: String, } -/// Action to select a permission granularity option from the dropdown. -/// This updates the selected granularity without triggering authorization. -#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] -#[action(namespace = agent)] -#[serde(deny_unknown_fields)] -pub struct SelectPermissionGranularity { - /// The tool call ID for which to select the granularity. - pub tool_call_id: String, - /// The index of the selected granularity option. - pub index: usize, -} - /// Creates a new conversation thread, optionally based on an existing thread. 
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = agent)] @@ -204,9 +210,6 @@ pub struct NewNativeAgentThreadFromSummary { #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ExternalAgent { - Gemini, - ClaudeCode, - Codex, NativeAgent, Custom { name: SharedString }, } @@ -218,15 +221,24 @@ impl ExternalAgent { thread_store: Entity, ) -> Rc { match self { - Self::Gemini => Rc::new(agent_servers::Gemini), - Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode), - Self::Codex => Rc::new(agent_servers::Codex), Self::NativeAgent => Rc::new(agent::NativeAgentServer::new(fs, thread_store)), Self::Custom { name } => Rc::new(agent_servers::CustomAgentServer::new(name.clone())), } } } +/// Sets where new threads will run. +#[derive( + Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action, +)] +#[action(namespace = agent)] +#[serde(rename_all = "snake_case", tag = "kind")] +pub enum StartThreadIn { + #[default] + LocalProject, + NewWorktree, +} + /// Content to initialize new external agent with. 
pub enum AgentInitialContent { ThreadSummary(acp_thread::AgentSessionInfo), @@ -310,6 +322,10 @@ pub fn init( .find_map(|item| item.downcast::()); if let Some(existing) = existing { + existing.update(cx, |_, cx| { + project::AgentRegistryStore::global(cx) + .update(cx, |store, cx| store.refresh(cx)); + }); workspace.activate_item(&existing, true, true, window, cx); } else { let registry_page = AgentRegistryPage::new(workspace, window, cx); @@ -372,7 +388,6 @@ fn update_command_palette_filter(cx: &mut App) { filter.hide_namespace("agents"); filter.hide_namespace("assistant"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.hide_namespace("zed_predict_onboarding"); filter.hide_namespace("edit_prediction"); @@ -393,19 +408,11 @@ fn update_command_palette_filter(cx: &mut App) { EditPredictionProvider::None => { filter.hide_namespace("edit_prediction"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.hide_action_types(&edit_prediction_actions); } EditPredictionProvider::Copilot => { filter.show_namespace("edit_prediction"); filter.show_namespace("copilot"); - filter.hide_namespace("supermaven"); - filter.show_action_types(edit_prediction_actions.iter()); - } - EditPredictionProvider::Supermaven => { - filter.show_namespace("edit_prediction"); - filter.hide_namespace("copilot"); - filter.show_namespace("supermaven"); filter.show_action_types(edit_prediction_actions.iter()); } EditPredictionProvider::Zed @@ -417,7 +424,6 @@ fn update_command_palette_filter(cx: &mut App) { | EditPredictionProvider::Experimental(_) => { filter.show_namespace("edit_prediction"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.show_action_types(edit_prediction_actions.iter()); } } diff --git a/crates/agent_ui/src/branch_names.rs b/crates/agent_ui/src/branch_names.rs new file mode 100644 index 0000000000000000000000000000000000000000..74e3dbc76b729309403606dfbecc8ea87f271913 --- /dev/null +++ 
b/crates/agent_ui/src/branch_names.rs @@ -0,0 +1,847 @@ +use collections::HashSet; +use rand::Rng; + +/// Names of historical typewriter brands, for use in auto-generated branch names. +/// (Hyphens and parens have been dropped so that the branch names are one-word.) +/// +/// Thanks to https://typewriterdatabase.com/alph.0.brands for the names! +const TYPEWRITER_NAMES: &[&str] = &[ + "abeille", + "acme", + "addo", + "adler", + "adlerette", + "adlerita", + "admiral", + "agamli", + "agar", + "agidel", + "agil", + "aguia", + "aguila", + "ahram", + "aigle", + "ajax", + "aktiv", + "ala", + "alba", + "albus", + "alexander", + "alexis", + "alfa", + "allen", + "alonso", + "alpina", + "amata", + "amaya", + "amka", + "anavi", + "anderson", + "andina", + "antares", + "apex", + "apsco", + "aquila", + "archo", + "ardita", + "argyle", + "aristocrat", + "aristokrat", + "arlington", + "armstrong", + "arpha", + "artus", + "astoria", + "atlantia", + "atlantic", + "atlas", + "augusta", + "aurora", + "austro", + "automatic", + "avanti", + "avona", + "azzurra", + "bajnok", + "baldwin", + "balkan", + "baltica", + "baltimore", + "barlock", + "barr", + "barrat", + "bartholomew", + "bashkiriya", + "bavaria", + "beaucourt", + "beko", + "belka", + "bennett", + "bennington", + "berni", + "bianca", + "bijou", + "bing", + "bisei", + "biser", + "bluebird", + "bolida", + "borgo", + "boston", + "boyce", + "bradford", + "brandenburg", + "brigitte", + "briton", + "brooks", + "brosette", + "buddy", + "burns", + "burroughs", + "byron", + "calanda", + "caligraph", + "cappel", + "cardinal", + "carissima", + "carlem", + "carlton", + "carmen", + "cawena", + "cella", + "celtic", + "century", + "champignon", + "cherryland", + "chevron", + "chicago", + "cicero", + "cifra", + "citizen", + "claudia", + "cleveland", + "clover", + "coffman", + "cole", + "columbia", + "commercial", + "companion", + "concentra", + "concord", + "concordia", + "conover", + "constanta", + "consul", + "conta", + "contenta", + 
"contimat", + "contina", + "continento", + "cornelia", + "coronado", + "cosmopolita", + "courier", + "craftamatic", + "crandall", + "crown", + "culema", + "dactyle", + "dankers", + "dart", + "daugherty", + "davis", + "dayton", + "dea", + "delmar", + "densmore", + "depantio", + "diadema", + "dial", + "diamant", + "diana", + "dictatype", + "diplomat", + "diskret", + "dolfus", + "dollar", + "domus", + "drake", + "draper", + "duplex", + "durabel", + "dynacord", + "eagle", + "eclipse", + "edelmann", + "edelweiss", + "edison", + "edita", + "edland", + "efka", + "eldorado", + "electa", + "electromatic", + "elektro", + "elgin", + "elliot", + "emerson", + "emka", + "emona", + "empire", + "engadine", + "engler", + "erfurt", + "erika", + "esko", + "essex", + "eureka", + "europa", + "everest", + "everlux", + "excelsior", + "express", + "fabers", + "facit", + "fairbanks", + "faktotum", + "famos", + "federal", + "felio", + "fidat", + "filius", + "fips", + "fish", + "fitch", + "fleet", + "florida", + "flott", + "flyer", + "flying", + "fontana", + "ford", + "forto", + "fortuna", + "fox", + "framo", + "franconia", + "franklin", + "friden", + "frolio", + "furstenberg", + "galesburg", + "galiette", + "gallia", + "garbell", + "gardner", + "geka", + "generation", + "genia", + "geniatus", + "gerda", + "gisela", + "glashutte", + "gloria", + "godrej", + "gossen", + "gourland", + "grandjean", + "granta", + "granville", + "graphic", + "gritzner", + "groma", + "guhl", + "guidonia", + "gundka", + "hacabo", + "haddad", + "halberg", + "halda", + "hall", + "hammond", + "hammonia", + "hanford", + "hansa", + "harmony", + "harris", + "hartford", + "hassia", + "hatch", + "heady", + "hebronia", + "hebros", + "hega", + "helios", + "helma", + "herald", + "hercules", + "hermes", + "herold", + "heros", + "hesperia", + "hogar", + "hooven", + "hopkins", + "horton", + "hugin", + "hungaria", + "hurtu", + "iberia", + "idea", + "ideal", + "imperia", + "impo", + "industria", + "industrio", + "ingersoll", + 
"international", + "invicta", + "irene", + "iris", + "iskra", + "ivitsa", + "ivriah", + "jackson", + "janalif", + "janos", + "jolux", + "juki", + "junior", + "juventa", + "juwel", + "kamkap", + "kamo", + "kanzler", + "kappel", + "karli", + "karstadt", + "keaton", + "kenbar", + "keystone", + "kim", + "klein", + "kneist", + "knoch", + "koh", + "kolibri", + "kolumbus", + "komet", + "kondor", + "koniger", + "konryu", + "kontor", + "kosmopolit", + "krypton", + "lambert", + "lasalle", + "lectra", + "leframa", + "lemair", + "lemco", + "liberty", + "libia", + "liga", + "lignose", + "lilliput", + "lindeteves", + "linowriter", + "listvitsa", + "ludolf", + "lutece", + "luxa", + "lyubava", + "mafra", + "magnavox", + "maher", + "majestic", + "majitouch", + "manhattan", + "mapuua", + "marathon", + "marburger", + "maritsa", + "maruzen", + "maskelyne", + "masspro", + "matous", + "mccall", + "mccool", + "mcloughlin", + "mead", + "mechno", + "mehano", + "meiselbach", + "melbi", + "melior", + "melotyp", + "mentor", + "mepas", + "mercedesia", + "mercurius", + "mercury", + "merkur", + "merritt", + "merz", + "messa", + "meteco", + "meteor", + "micron", + "mignon", + "mikro", + "minerva", + "mirian", + "mirina", + "mitex", + "molle", + "monac", + "monarch", + "mondiale", + "monica", + "monofix", + "monopol", + "monpti", + "monta", + "montana", + "montgomery", + "moon", + "morgan", + "morris", + "morse", + "moya", + "moyer", + "munson", + "musicwriter", + "nadex", + "nakajima", + "neckermann", + "neubert", + "neya", + "ninety", + "nisa", + "noiseless", + "noor", + "nora", + "nord", + "norden", + "norica", + "norma", + "norman", + "north", + "nototyp", + "nova", + "novalevi", + "odell", + "odhner", + "odo", + "odoma", + "ohio", + "ohtani", + "oliva", + "oliver", + "olivetti", + "olympia", + "omega", + "optima", + "orbis", + "orel", + "orga", + "oriette", + "orion", + "orn", + "orplid", + "pacior", + "pagina", + "parisienne", + "passat", + "pearl", + "peerless", + "perfect", + "perfecta", + 
"perkeo", + "perkins", + "perlita", + "pettypet", + "phoenix", + "piccola", + "picht", + "pinnock", + "pionier", + "plurotyp", + "plutarch", + "pneumatic", + "pocket", + "polyglott", + "polygraph", + "pontiac", + "portable", + "portex", + "pozzi", + "premier", + "presto", + "primavera", + "progress", + "protos", + "pterotype", + "pullman", + "pulsatta", + "quick", + "racer", + "radio", + "rally", + "rand", + "readers", + "reed", + "referent", + "reff", + "regent", + "regia", + "regina", + "rekord", + "reliable", + "reliance", + "remagg", + "rembrandt", + "remer", + "remington", + "remsho", + "remstar", + "remtor", + "reporters", + "resko", + "rex", + "rexpel", + "rheinita", + "rheinmetall", + "rival", + "roberts", + "robotron", + "rocher", + "rochester", + "roebuck", + "rofa", + "roland", + "rooy", + "rover", + "roxy", + "roy", + "royal", + "rundstatler", + "sabaudia", + "sabb", + "saleem", + "salter", + "sampo", + "sarafan", + "saturn", + "saxonia", + "schade", + "schapiro", + "schreibi", + "scripta", + "sears", + "secor", + "selectric", + "selekta", + "senator", + "sense", + "senta", + "serd", + "shilling", + "shimade", + "shimer", + "sholes", + "shuang", + "siegfried", + "siemag", + "silma", + "silver", + "simplex", + "simtype", + "singer", + "smith", + "soemtron", + "sonja", + "speedwriter", + "sphinx", + "starlet", + "stearns", + "steel", + "stella", + "steno", + "sterling", + "stoewer", + "stolzenberg", + "stott", + "strangfeld", + "sture", + "stylotyp", + "sun", + "superba", + "superia", + "supermetall", + "surety", + "swintec", + "swissa", + "talbos", + "talleres", + "tatrapoint", + "taurus", + "taylorix", + "tell", + "tempotype", + "tippco", + "titania", + "tops", + "towa", + "toyo", + "tradition", + "transatlantic", + "traveller", + "trebla", + "triumph", + "turia", + "typatune", + "typen", + "typorium", + "ugro", + "ultima", + "unda", + "underwood", + "unica", + "unitype", + "ursula", + "utax", + "varityper", + "vasanta", + "vendex", + "venus", + 
"victor", + "victoria", + "video", + "viking", + "vira", + "virotyp", + "visigraph", + "vittoria", + "volcan", + "vornado", + "voss", + "vultur", + "waltons", + "wanamaker", + "wanderer", + "ward", + "warner", + "waterloo", + "waverley", + "wayne", + "webster", + "wedgefield", + "welco", + "wellington", + "wellon", + "weltblick", + "westphalia", + "wiedmer", + "williams", + "wilson", + "winkel", + "winsor", + "wizard", + "woodstock", + "woodwards", + "yatran", + "yost", + "zenit", + "zentronik", + "zeta", + "zeya", +]; + +/// Picks a typewriter name that isn't already taken by an existing branch. +/// +/// Each entry in `existing_branches` is expected to be a full branch name +/// like `"olivetti-a3f9b2c1"`. The prefix before the last `'-'` is treated +/// as the taken typewriter name. Branches without a `'-'` are ignored. +/// +/// Returns `None` when every name in the pool is already taken. +pub fn pick_typewriter_name( + existing_branches: &[&str], + rng: &mut impl Rng, +) -> Option<&'static str> { + let disallowed: HashSet<&str> = existing_branches + .iter() + .filter_map(|branch| branch.rsplit_once('-').map(|(prefix, _)| prefix)) + .collect(); + + let available: Vec<&'static str> = TYPEWRITER_NAMES + .iter() + .copied() + .filter(|name| !disallowed.contains(name)) + .collect(); + + if available.is_empty() { + return None; + } + + let index = rng.random_range(0..available.len()); + Some(available[index]) +} + +/// Generates a branch name like `"olivetti-a3f9b2c1"` by picking a typewriter +/// name that isn't already taken and appending an 8-character alphanumeric hash. +/// +/// Returns `None` when every typewriter name in the pool is already taken. 
+pub fn generate_branch_name(existing_branches: &[&str], rng: &mut impl Rng) -> Option { + let typewriter_name = pick_typewriter_name(existing_branches, rng)?; + let hash: String = (0..8) + .map(|_| { + let idx: u8 = rng.random_range(0..36); + if idx < 10 { + (b'0' + idx) as char + } else { + (b'a' + idx - 10) as char + } + }) + .collect(); + Some(format!("{typewriter_name}-{hash}")) +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::rngs::StdRng; + + #[gpui::test(iterations = 10)] + fn test_pick_typewriter_name_with_no_disallowed(mut rng: StdRng) { + let name = pick_typewriter_name(&[], &mut rng); + assert!(name.is_some()); + assert!(TYPEWRITER_NAMES.contains(&name.unwrap())); + } + + #[gpui::test(iterations = 10)] + fn test_pick_typewriter_name_excludes_taken_names(mut rng: StdRng) { + let branch_names = &["olivetti-abc12345", "selectric-def67890"]; + let name = pick_typewriter_name(branch_names, &mut rng).unwrap(); + assert_ne!(name, "olivetti"); + assert_ne!(name, "selectric"); + } + + #[gpui::test] + fn test_pick_typewriter_name_all_taken(mut rng: StdRng) { + let branch_names: Vec = TYPEWRITER_NAMES + .iter() + .map(|name| format!("{name}-00000000")) + .collect(); + let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect(); + let name = pick_typewriter_name(&branch_name_refs, &mut rng); + assert!(name.is_none()); + } + + #[gpui::test(iterations = 10)] + fn test_pick_typewriter_name_ignores_branches_without_hyphen(mut rng: StdRng) { + let branch_names = &["main", "develop", "feature"]; + let name = pick_typewriter_name(branch_names, &mut rng); + assert!(name.is_some()); + assert!(TYPEWRITER_NAMES.contains(&name.unwrap())); + } + + #[gpui::test(iterations = 10)] + fn test_generate_branch_name_format(mut rng: StdRng) { + let branch_name = generate_branch_name(&[], &mut rng).unwrap(); + let (prefix, suffix) = branch_name.rsplit_once('-').unwrap(); + assert!(TYPEWRITER_NAMES.contains(&prefix)); + assert_eq!(suffix.len(), 8); + 
assert!(suffix.chars().all(|c| c.is_ascii_alphanumeric())); + } + + #[gpui::test] + fn test_generate_branch_name_returns_none_when_exhausted(mut rng: StdRng) { + let branch_names: Vec = TYPEWRITER_NAMES + .iter() + .map(|name| format!("{name}-00000000")) + .collect(); + let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect(); + let result = generate_branch_name(&branch_name_refs, &mut rng); + assert!(result.is_none()); + } + + #[gpui::test(iterations = 100)] + fn test_generate_branch_name_never_reuses_taken_prefix(mut rng: StdRng) { + let existing = &["olivetti-123abc", "selectric-def456"]; + let branch_name = generate_branch_name(existing, &mut rng).unwrap(); + let (prefix, _) = branch_name.rsplit_once('-').unwrap(); + assert_ne!(prefix, "olivetti"); + assert_ne!(prefix, "selectric"); + } + + #[gpui::test(iterations = 100)] + fn test_generate_branch_name_avoids_multiple_taken_prefixes(mut rng: StdRng) { + let existing = &[ + "olivetti-aaa11111", + "selectric-bbb22222", + "corona-ccc33333", + "remington-ddd44444", + "underwood-eee55555", + ]; + let taken_prefixes: HashSet<&str> = existing + .iter() + .filter_map(|b| b.rsplit_once('-').map(|(prefix, _)| prefix)) + .collect(); + let branch_name = generate_branch_name(existing, &mut rng).unwrap(); + let (prefix, _) = branch_name.rsplit_once('-').unwrap(); + assert!( + !taken_prefixes.contains(prefix), + "generated prefix {prefix:?} collides with an existing branch" + ); + } + + #[gpui::test(iterations = 100)] + fn test_generate_branch_name_with_varied_hash_suffixes(mut rng: StdRng) { + let existing = &[ + "olivetti-aaaaaaaa", + "olivetti-bbbbbbbb", + "olivetti-cccccccc", + ]; + let branch_name = generate_branch_name(existing, &mut rng).unwrap(); + let (prefix, _) = branch_name.rsplit_once('-').unwrap(); + assert_ne!( + prefix, "olivetti", + "should avoid olivetti regardless of how many variants exist" + ); + } + + #[test] + fn test_typewriter_names_are_valid() { + let mut seen = 
HashSet::default(); + for &name in TYPEWRITER_NAMES { + assert!( + seen.insert(name), + "duplicate entry in TYPEWRITER_NAMES: {name:?}" + ); + } + + for window in TYPEWRITER_NAMES.windows(2) { + assert!( + window[0] <= window[1], + "TYPEWRITER_NAMES is not sorted: {0:?} should come after {1:?}", + window[1], + window[0], + ); + } + + for &name in TYPEWRITER_NAMES { + assert!( + !name.contains('-'), + "TYPEWRITER_NAMES entry contains a hyphen: {name:?}" + ); + } + + for &name in TYPEWRITER_NAMES { + assert!( + name.chars().all(|c| c.is_lowercase() || !c.is_alphabetic()), + "TYPEWRITER_NAMES entry is not lowercase: {name:?}" + ); + } + } +} diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 39759f264996ee07a7efd2b2bee8b1d1e3847f51..4f7bf084b7e96a14e6ecaafb04adfdbb6712e574 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -526,11 +526,13 @@ impl CodegenAlternative { name: REWRITE_SECTION_TOOL_NAME.to_string(), description: "Replaces text in tags with your replacement_text.".to_string(), input_schema: language_model::tool_schema::root_schema_for::(tool_input_format).to_value(), + use_input_streaming: false, }, LanguageModelRequestTool { name: FAILURE_MESSAGE_TOOL_NAME.to_string(), description: "Use this tool to provide a message to the user when you're unable to complete a task.".to_string(), input_schema: language_model::tool_schema::root_schema_for::(tool_input_format).to_value(), + use_input_streaming: false, }, ]; @@ -545,6 +547,7 @@ impl CodegenAlternative { messages, thinking_allowed: false, thinking_effort: None, + speed: None, } })) } @@ -624,6 +627,7 @@ impl CodegenAlternative { messages: vec![request_message], thinking_allowed: false, thinking_effort: None, + speed: None, } })) } diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index baa57368e50fb7a604138e6158b06349b65b75e6..30778909b2c9a91dab0b20417e973b7e83ea6a17 
100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use crate::acp::AcpThreadHistory; +use crate::ThreadHistory; use acp_thread::{AgentSessionInfo, MentionUri}; use anyhow::Result; use editor::{ @@ -206,7 +206,7 @@ pub struct PromptCompletionProvider { source: Arc, editor: WeakEntity, mention_set: Entity, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, workspace: WeakEntity, } @@ -216,7 +216,7 @@ impl PromptCompletionProvider { source: T, editor: WeakEntity, mention_set: Entity, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, workspace: WeakEntity, ) -> Self { @@ -617,6 +617,7 @@ impl PromptCompletionProvider { let crease = crate::mention_set::crease_for_mention( mention_uri.name().into(), mention_uri.icon_path(cx), + None, range, editor.downgrade(), ); diff --git a/crates/agent_ui/src/acp/config_options.rs b/crates/agent_ui/src/config_options.rs similarity index 98% rename from crates/agent_ui/src/acp/config_options.rs rename to crates/agent_ui/src/config_options.rs index 387069cd1671fa811ad3933d943f5d691d848b37..6ec2595202490ca7474717f8985b6e4f6d7ca0b9 100644 --- a/crates/agent_ui/src/acp/config_options.rs +++ b/crates/agent_ui/src/config_options.rs @@ -49,7 +49,7 @@ impl ConfigOptionsView { if let Some(mut rx) = rx { while let Ok(()) = rx.recv().await { this.update_in(cx, |this, window, cx| { - this.refresh_selectors_if_needed(window, cx); + this.rebuild_selectors(window, cx); cx.notify(); }) .log_err(); @@ -184,15 +184,10 @@ impl ConfigOptionsView { .collect() } - fn refresh_selectors_if_needed(&mut self, window: &mut Window, cx: &mut Context) { - let current_ids = Self::config_option_ids(&self.config_options); - if current_ids != self.config_option_ids { - self.config_option_ids = current_ids; - self.rebuild_selectors(window, cx); - } - } - fn rebuild_selectors(&mut self, 
window: &mut Window, cx: &mut Context) { + // Config option updates can mutate option values for existing IDs (for example, + // reasoning levels after a model switch). Rebuild to refresh cached picker entries. + self.config_option_ids = Self::config_option_ids(&self.config_options); self.selectors = Self::build_selectors( &self.config_options, &self.agent_server, @@ -498,12 +493,7 @@ impl PickerDelegate for ConfigOptionPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(ConfigOptionPickerEntry::Option(_)) => true, Some(ConfigOptionPickerEntry::Separator(_)) | None => false, diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/connection_view.rs similarity index 92% rename from crates/agent_ui/src/acp/thread_view.rs rename to crates/agent_ui/src/connection_view.rs index cbdc3ad5d1e5d28b1597ba405846ac48dbfeb928..e7e9403e052f6578ab20982fbb27c7c6a29d1a80 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -26,10 +26,10 @@ use fs::Fs; use futures::FutureExt as _; use gpui::{ Action, Animation, AnimationExt, AnyView, App, ClickEvent, ClipboardItem, CursorStyle, - ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, ListOffset, ListState, ObjectFit, - PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, WeakEntity, Window, - WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, list, point, - pulsating_between, + ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, ListOffset, ListState, + ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, + WeakEntity, Window, WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, + list, point, pulsating_between, }; use language::Buffer; use 
language_model::LanguageModelRegistry; @@ -62,28 +62,28 @@ use zed_actions::assistant::OpenRulesLibrary; use super::config_options::ConfigOptionsView; use super::entry_view_state::EntryViewState; -use super::thread_history::AcpThreadHistory; -use crate::acp::AcpModelSelectorPopover; -use crate::acp::ModeSelector; -use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent}; -use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; +use super::thread_history::ThreadHistory; +use crate::ModeSelector; +use crate::ModelSelectorPopover; use crate::agent_diff::AgentDiff; +use crate::entry_view_state::{EntryViewEvent, ViewEvent}; +use crate::message_editor::{MessageEditor, MessageEditorEvent}; use crate::profile_selector::{ProfileProvider, ProfileSelector}; use crate::ui::{AgentNotification, AgentNotificationEvent}; use crate::{ AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, - OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, - SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage, ToggleProfileSelector, - ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, + OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, SendImmediately, + SendNextQueuedMessage, ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, + ToggleThinkingMode, UndoLastReject, }; const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30); const TOKEN_THRESHOLD: u64 = 250; -mod active_thread; -pub use active_thread::*; +mod thread_view; +pub use thread_view::*; pub struct QueuedMessage { pub content: Vec, @@ -107,8 +107,8 @@ pub(crate) enum ThreadError { }, } -impl ThreadError { - fn from_err(error: anyhow::Error, agent_name: &str) -> Self { +impl From for ThreadError { + fn from(error: 
anyhow::Error) -> Self { if error.is::() { Self::PaymentRequired } else if let Some(acp_error) = error.downcast_ref::() @@ -123,18 +123,9 @@ impl ThreadError { .downcast_ref::() .map(|acp_error| SharedString::from(acp_error.code.to_string())); - // TODO: we should have Gemini return better errors here. - if agent_name == "Gemini CLI" - && message.contains("Could not load the default credentials") - || message.contains("API key not valid") - || message.contains("Request had invalid authentication credentials") - { - Self::AuthenticationRequired(message) - } else { - Self::Other { - message, - acp_error_code, - } + Self::Other { + message, + acp_error_code, } } } @@ -164,6 +155,9 @@ pub(crate) struct Conversation { threads: HashMap>, permission_requests: IndexMap>, subscriptions: Vec, + /// Tracks the selected granularity index for each tool call's permission dropdown. + /// The index corresponds to the position in the allow_options list. + selected_permission_granularity: HashMap>, } impl Conversation { @@ -191,7 +185,7 @@ impl Conversation { | AcpThreadEvent::EntriesRemoved(_) | AcpThreadEvent::Retry(_) | AcpThreadEvent::SubagentSpawned(_) - | AcpThreadEvent::Stopped + | AcpThreadEvent::Stopped(_) | AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) | AcpThreadEvent::PromptCapabilitiesUpdated @@ -205,6 +199,29 @@ impl Conversation { .insert(thread.read(cx).session_id().clone(), thread); } + pub fn selected_permission_granularity( + &self, + session_id: &acp::SessionId, + tool_call_id: &acp::ToolCallId, + ) -> Option { + self.selected_permission_granularity + .get(session_id) + .and_then(|map| map.get(tool_call_id)) + .copied() + } + + pub fn set_selected_permission_granularity( + &mut self, + session_id: acp::SessionId, + tool_call_id: acp::ToolCallId, + granularity: usize, + ) { + self.selected_permission_granularity + .entry(session_id) + .or_default() + .insert(tool_call_id, granularity); + } + pub fn pending_tool_call<'a>( &'a self, session_id: 
&acp::SessionId, @@ -278,7 +295,13 @@ impl Conversation { } } -pub struct AcpServerView { +pub enum AcpServerViewEvent { + ActiveThreadChanged, +} + +impl EventEmitter for ConnectionView {} + +pub struct ConnectionView { agent: Rc, agent_server_store: Entity, workspace: WeakEntity, @@ -286,8 +309,7 @@ pub struct AcpServerView { thread_store: Option>, prompt_store: Option>, server_state: ServerState, - login: Option, // is some <=> Active | Unauthenticated - history: Entity, + history: Entity, focus_handle: FocusHandle, notifications: Vec>, notification_subscriptions: HashMap, Vec>, @@ -295,8 +317,14 @@ pub struct AcpServerView { _subscriptions: Vec, } -impl AcpServerView { - pub fn active_thread(&self) -> Option<&Entity> { +impl ConnectionView { + pub fn has_auth_methods(&self) -> bool { + self.as_connected().map_or(false, |connected| { + !connected.connection.auth_methods().is_empty() + }) + } + + pub fn active_thread(&self) -> Option<&Entity> { match &self.server_state { ServerState::Connected(connected) => connected.active_view(), _ => None, @@ -314,7 +342,7 @@ impl AcpServerView { .pending_tool_call(id, cx) } - pub fn parent_thread(&self, cx: &App) -> Option> { + pub fn parent_thread(&self, cx: &App) -> Option> { match &self.server_state { ServerState::Connected(connected) => { let mut current = connected.active_view()?; @@ -331,7 +359,7 @@ impl AcpServerView { } } - pub fn thread_view(&self, session_id: &acp::SessionId) -> Option> { + pub fn thread_view(&self, session_id: &acp::SessionId) -> Option> { let connected = self.as_connected()?; connected.threads.get(session_id).cloned() } @@ -364,6 +392,7 @@ impl AcpServerView { if let Some(view) = self.active_thread() { view.focus_handle(cx).focus(window, cx); } + cx.emit(AcpServerViewEvent::ActiveThreadChanged); cx.notify(); } } @@ -379,7 +408,7 @@ enum ServerState { pub struct ConnectedServerState { auth_state: AuthState, active_id: Option, - threads: HashMap>, + threads: HashMap>, connection: Rc, conversation: 
Entity, } @@ -407,7 +436,7 @@ struct LoadingView { } impl ConnectedServerState { - pub fn active_view(&self) -> Option<&Entity> { + pub fn active_view(&self) -> Option<&Entity> { self.active_id.as_ref().and_then(|id| self.threads.get(id)) } @@ -434,7 +463,7 @@ impl ConnectedServerState { } } -impl AcpServerView { +impl ConnectionView { pub fn new( agent: Rc, resume_thread: Option, @@ -443,7 +472,7 @@ impl AcpServerView { project: Entity, thread_store: Option>, prompt_store: Option>, - history: Entity, + history: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -487,7 +516,6 @@ impl AcpServerView { window, cx, ), - login: None, notifications: Vec::new(), notification_subscriptions: HashMap::default(), auth_task: None, @@ -503,6 +531,7 @@ impl AcpServerView { } self.server_state = state; + cx.emit(AcpServerViewEvent::ActiveThreadChanged); cx.notify(); } @@ -569,22 +598,29 @@ impl AcpServerView { } }) .collect(); - let root_dir = worktree_roots.first().cloned(); let session_cwd = resume_thread .as_ref() .and_then(|resume| { resume .cwd .as_ref() - .and_then(|cwd| util::paths::normalize_lexically(cwd).ok()) .filter(|cwd| { - worktree_roots - .iter() - .any(|root| cwd.starts_with(root.as_ref())) + // Validate with the normalized path (rejects `..` traversals), + // but return the original cwd to preserve its path separators. + // On Windows, `normalize_lexically` rebuilds the path with + // backslashes via `PathBuf::push`, which would corrupt + // forward-slash Linux paths used by WSL agents. 
+ util::paths::normalize_lexically(cwd) + .ok() + .is_some_and(|normalized| { + worktree_roots + .iter() + .any(|root| normalized.starts_with(root.as_ref())) + }) }) - .map(|path| path.into()) + .map(|path| Arc::from(path.as_path())) }) - .or_else(|| root_dir.clone()) + .or_else(|| worktree_roots.first().cloned()) .unwrap_or_else(|| paths::home_dir().as_path().into()); let (status_tx, mut status_rx) = watch::channel("Loading…".into()); @@ -596,19 +632,18 @@ impl AcpServerView { Some(new_version_available_tx), ); - let connect_task = agent.connect(root_dir.as_deref(), delegate, cx); + let connect_task = agent.connect(delegate, cx); let load_task = cx.spawn_in(window, async move |this, cx| { let connection = match connect_task.await { - Ok((connection, login)) => { - this.update(cx, |this, _| this.login = login).ok(); - connection - } + Ok(connection) => connection, Err(err) => { this.update_in(cx, |this, window, cx| { if err.downcast_ref::().is_some() { this.handle_load_error(err, window, cx); } else if let Some(active) = this.active_thread() { - active.update(cx, |active, cx| active.handle_any_thread_error(err, cx)); + active.update(cx, |active, cx| active.handle_thread_error(err, cx)); + } else { + this.handle_load_error(err, window, cx); } cx.notify(); }) @@ -701,6 +736,14 @@ impl AcpServerView { } let id = current.read(cx).thread.read(cx).session_id().clone(); + let session_list = if connection.supports_session_history() { + connection.session_list(cx) + } else { + None + }; + this.history.update(cx, |history, cx| { + history.set_session_list(session_list, cx); + }); this.set_server_state( ServerState::Connected(ConnectedServerState { connection, @@ -768,7 +811,7 @@ impl AcpServerView { initial_content: Option, window: &mut Window, cx: &mut Context, - ) -> Entity { + ) -> Entity { let agent_name = self.agent.name(); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![])); @@ 
-802,18 +845,14 @@ impl AcpServerView { ); }); + if let Some(scroll_position) = thread.read(cx).ui_scroll_position() { + list_state.scroll_to(scroll_position); + } + AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx); let connection = thread.read(cx).connection().clone(); let session_id = thread.read(cx).session_id().clone(); - let session_list = if connection.supports_session_history() { - connection.session_list(cx) - } else { - None - }; - self.history.update(cx, |history, cx| { - history.set_session_list(session_list, cx); - }); // Check for config options first // Config options take precedence over legacy mode/model selectors @@ -840,7 +879,7 @@ impl AcpServerView { let agent_server = self.agent.clone(); let fs = self.project.read(cx).fs().clone(); cx.new(|cx| { - AcpModelSelectorPopover::new( + ModelSelectorPopover::new( selector, agent_server, fs, @@ -871,7 +910,10 @@ impl AcpServerView { .entries() .iter() .filter_map(|entry| match entry { - AgentThreadEntry::ToolCall(call) => call.subagent_session_id.clone(), + AgentThreadEntry::ToolCall(call) => call + .subagent_session_info + .as_ref() + .map(|i| i.session_id.clone()), _ => None, }) .collect::>(); @@ -914,16 +956,28 @@ impl AcpServerView { .unwrap_or_else(|| agent_name.clone()); let agent_icon = self.agent.logo(); + let agent_icon_from_external_svg = self + .agent_server_store + .read(cx) + .agent_icon(&ExternalAgentServerName(self.agent.name())) + .or_else(|| { + project::AgentRegistryStore::try_global(cx).and_then(|store| { + store + .read(cx) + .agent(self.agent.name().as_ref()) + .and_then(|a| a.icon_path().cloned()) + }) + }); let weak = cx.weak_entity(); cx.new(|cx| { - AcpThreadView::new( + ThreadView::new( parent_id, thread, conversation, - self.login.clone(), weak, agent_icon, + agent_icon_from_external_svg, agent_name, agent_display_name, self.workspace.clone(), @@ -1136,6 +1190,20 @@ impl AcpServerView { } } + fn move_queued_message_to_main_editor( + &mut self, + index: 
usize, + inserted_text: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(active) = self.active_thread() { + active.update(cx, |active, cx| { + active.move_queued_message_to_main_editor(index, inserted_text, window, cx); + }); + } + } + fn handle_thread_event( &mut self, thread: &Entity, @@ -1198,13 +1266,18 @@ impl AcpServerView { }); } } - AcpThreadEvent::Stopped => { + AcpThreadEvent::Stopped(stop_reason) => { if let Some(active) = self.thread_view(&thread_id) { active.update(cx, |active, _cx| { active.thread_retry_status.take(); }); } if is_subagent { + if *stop_reason == acp::StopReason::EndTurn { + thread.update(cx, |thread, cx| { + thread.mark_as_subagent_output(cx); + }); + } return; } @@ -1300,6 +1373,7 @@ impl AcpServerView { } }); } + cx.notify(); } AcpThreadEvent::PromptCapabilitiesUpdated => { if let Some(active) = self.thread_view(&thread_id) { @@ -1417,13 +1491,6 @@ impl AcpServerView { }) .unwrap_or_default(); - // Run SpawnInTerminal in the same dir as the ACP server - let cwd = connected - .connection - .clone() - .downcast::() - .map(|acp_conn| acp_conn.root_dir().to_path_buf()); - // Build SpawnInTerminal from _meta let login = task::SpawnInTerminal { id: task::TaskId(format!("external-agent-{}-login", label)), @@ -1432,7 +1499,6 @@ impl AcpServerView { command: Some(command.to_string()), args, command_label: label.to_string(), - cwd, env, use_new_terminal: true, allow_concurrent_runs: true, @@ -1487,7 +1553,7 @@ impl AcpServerView { } if let Some(active) = this.active_thread() { active.update(cx, |active, cx| { - active.handle_any_thread_error(err, cx); + active.handle_thread_error(err, cx); }) } } else { @@ -1503,79 +1569,10 @@ impl AcpServerView { } } - if method.0.as_ref() == "gemini-api-key" { - let registry = LanguageModelRegistry::global(cx); - let provider = registry - .read(cx) - .provider(&language_model::GOOGLE_PROVIDER_ID) - .unwrap(); - if !provider.is_authenticated(cx) { - let this = cx.weak_entity(); - 
let agent_name = self.agent.name(); - let connection = connection.clone(); - window.defer(cx, |window, cx| { - Self::handle_auth_required( - this, - AuthRequired { - description: Some("GEMINI_API_KEY must be set".to_owned()), - provider_id: Some(language_model::GOOGLE_PROVIDER_ID), - }, - agent_name, - connection, - window, - cx, - ); - }); - return; - } - } else if method.0.as_ref() == "vertex-ai" - && std::env::var("GOOGLE_API_KEY").is_err() - && (std::env::var("GOOGLE_CLOUD_PROJECT").is_err() - || (std::env::var("GOOGLE_CLOUD_PROJECT").is_err())) - { - let this = cx.weak_entity(); - let agent_name = self.agent.name(); - let connection = connection.clone(); - - window.defer(cx, |window, cx| { - Self::handle_auth_required( - this, - AuthRequired { - description: Some( - "GOOGLE_API_KEY must be set in the environment to use Vertex AI authentication for Gemini CLI. Please export it and restart Zed." - .to_owned(), - ), - provider_id: None, - }, - agent_name, - connection, - window, - cx, - ) - }); - return; - } - configuration_view.take(); pending_auth_method.replace(method.clone()); - let authenticate = if let Some(login) = self.login.clone() { - if let Some(workspace) = self.workspace.upgrade() { - let project = self.project.clone(); - Self::spawn_external_agent_login( - login, - workspace, - project, - method.clone(), - false, - window, - cx, - ) - } else { - Task::ready(Ok(())) - } - } else { - connection.authenticate(method, cx) - }; + + let authenticate = connection.authenticate(method, cx); cx.notify(); self.auth_task = Some(cx.spawn_in(window, { async move |this, cx| { @@ -1605,7 +1602,7 @@ impl AcpServerView { pending_auth_method.take(); } if let Some(active) = this.active_thread() { - active.update(cx, |active, cx| active.handle_any_thread_error(err, cx)); + active.update(cx, |active, cx| active.handle_thread_error(err, cx)); } } else { this.reset(window, cx); @@ -1850,15 +1847,7 @@ impl AcpServerView { .enumerate() .rev() .map(|(ix, method)| { - let 
(method_id, name) = if self.project.read(cx).is_via_remote_server() - && method.id.0.as_ref() == "oauth-personal" - && method.name == "Log in with Google" - { - ("spawn-gemini-cli".into(), "Log in with Gemini CLI".into()) - } else { - (method.id.0.clone(), method.name.clone()) - }; - + let (method_id, name) = (method.id.0.clone(), method.name.clone()); let agent_telemetry_id = connection.telemetry_id(); Button::new(method_id.clone(), name) @@ -2213,6 +2202,7 @@ impl AcpServerView { for (index, editor) in editors.into_iter().enumerate() { if let Some(content) = queued_messages.get(index) { editor.update(cx, |editor, cx| { + editor.set_read_only(true, cx); editor.set_message(content.clone(), window, cx); }); } @@ -2241,6 +2231,7 @@ impl AcpServerView { window, cx, ); + editor.set_read_only(true, cx); editor.set_message(content, window, cx); editor }); @@ -2249,6 +2240,8 @@ impl AcpServerView { &editor, window, move |this, _editor, event, window, cx| match event { + MessageEditorEvent::InputAttempted(text) => this + .move_queued_message_to_main_editor(index, Some(text.as_ref()), window, cx), MessageEditorEvent::LostFocus => { this.save_queued_message_at_index(index, cx); } @@ -2283,7 +2276,7 @@ impl AcpServerView { fn render_markdown(&self, markdown: Entity, style: MarkdownStyle) -> MarkdownElement { let workspace = self.workspace.clone(); MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| { - crate::acp::thread_view::active_thread::open_link(text, &workspace, window, cx); + crate::connection_view::thread_view::open_link(text, &workspace, window, cx); }) } @@ -2606,7 +2599,7 @@ fn placeholder_text(agent_name: &str, has_commands: bool) -> String { } } -impl Focusable for AcpServerView { +impl Focusable for ConnectionView { fn focus_handle(&self, cx: &App) -> FocusHandle { match self.active_thread() { Some(thread) => thread.read(cx).focus_handle(cx), @@ -2616,7 +2609,7 @@ impl Focusable for AcpServerView { } #[cfg(any(test, feature = 
"test-support"))] -impl AcpServerView { +impl ConnectionView { /// Expands a tool call so its content is visible. /// This is primarily useful for visual testing. pub fn expand_tool_call(&mut self, tool_call_id: acp::ToolCallId, cx: &mut Context) { @@ -2629,7 +2622,7 @@ impl AcpServerView { } } -impl Render for AcpServerView { +impl Render for ConnectionView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { self.sync_queued_message_editors(window, cx); @@ -2808,11 +2801,11 @@ pub(crate) mod tests { let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); // Create history without an initial session list - it will be set after connection - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::default_response()), None, None, @@ -2868,6 +2861,33 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_new_thread_creation_triggers_session_list_refresh(cx: &mut TestAppContext) { + init_test(cx); + + let session = AgentSessionInfo::new(SessionId::new("history-session")); + let (thread_view, history, cx) = setup_thread_view_with_history( + StubAgentServer::new(SessionHistoryConnection::new(vec![session.clone()])), + cx, + ) + .await; + + history.read_with(cx, |history, _cx| { + assert!( + history.has_session_list(), + "session list should be attached after thread creation" + ); + }); + + active_thread(&thread_view, cx).read_with(cx, |view, _cx| { + assert_eq!(view.recent_history_entries.len(), 1); + assert_eq!( + view.recent_history_entries[0].session_id, + session.session_id + ); + }); + } + #[gpui::test] async fn test_resume_without_history_adds_notice(cx: &mut TestAppContext) { init_test(cx); @@ -2880,11 +2900,11 @@ pub(crate) mod tests { let workspace = 
multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::new(ResumeOnlyAgentConnection)), Some(session), None, @@ -2934,11 +2954,11 @@ pub(crate) mod tests { session.cwd = Some(PathBuf::from("/project/subdir")); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let _thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::new(connection)), Some(session), None, @@ -2986,11 +3006,11 @@ pub(crate) mod tests { session.cwd = Some(PathBuf::from("/some/other/path")); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let _thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::new(connection)), Some(session), None, @@ -3038,11 +3058,11 @@ pub(crate) mod tests { session.cwd = Some(PathBuf::from("/project/../outside")); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let _thread_view = cx.update(|window, cx| { cx.new(|cx| { 
- AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::new(connection)), Some(session), None, @@ -3092,6 +3112,38 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_connect_failure_transitions_to_load_error(cx: &mut TestAppContext) { + init_test(cx); + + let (thread_view, cx) = setup_thread_view(FailingAgentServer, cx).await; + + thread_view.read_with(cx, |view, cx| { + let title = view.title(cx); + assert_eq!( + title.as_ref(), + "Error Loading Codex CLI", + "Tab title should show the agent name with an error prefix" + ); + match &view.server_state { + ServerState::LoadError(LoadError::Other(msg)) => { + assert!( + msg.contains("Invalid gzip header"), + "Error callout should contain the underlying extraction error, got: {msg}" + ); + } + other => panic!( + "Expected LoadError::Other, got: {}", + match other { + ServerState::Loading(_) => "Loading (stuck!)", + ServerState::LoadError(_) => "LoadError (wrong variant)", + ServerState::Connected(_) => "Connected", + } + ), + } + }); + } + #[gpui::test] async fn test_auth_required_on_initial_connect(cx: &mut TestAppContext) { init_test(cx); @@ -3126,7 +3178,7 @@ pub(crate) mod tests { ); }); - // Authenticate using the real authenticate flow on AcpServerView. + // Authenticate using the real authenticate flow on ConnectionView. // This calls connection.authenticate(), which flips the internal flag, // then on success triggers reset() -> new_session() which now succeeds. 
thread_view.update_in(cx, |view, window, cx| { @@ -3313,12 +3365,12 @@ pub(crate) mod tests { // Set up thread view in workspace 1 let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let agent = StubAgentServer::default_response(); let thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(agent), None, None, @@ -3482,7 +3534,19 @@ pub(crate) mod tests { async fn setup_thread_view( agent: impl AgentServer + 'static, cx: &mut TestAppContext, - ) -> (Entity, &mut VisualTestContext) { + ) -> (Entity, &mut VisualTestContext) { + let (thread_view, _history, cx) = setup_thread_view_with_history(agent, cx).await; + (thread_view, cx) + } + + async fn setup_thread_view_with_history( + agent: impl AgentServer + 'static, + cx: &mut TestAppContext, + ) -> ( + Entity, + Entity, + &mut VisualTestContext, + ) { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; let (multi_workspace, cx) = @@ -3490,11 +3554,11 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(agent), None, None, @@ -3502,17 +3566,17 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, + history.clone(), window, cx, ) }) }); cx.run_until_parked(); - (thread_view, cx) + (thread_view, history, cx) } - fn add_to_workspace(thread_view: Entity, cx: &mut VisualTestContext) { + fn 
add_to_workspace(thread_view: Entity, cx: &mut VisualTestContext) { let workspace = thread_view.read_with(cx, |thread_view, _cx| thread_view.workspace.clone()); workspace @@ -3528,7 +3592,7 @@ pub(crate) mod tests { .unwrap(); } - struct ThreadViewItem(Entity); + struct ThreadViewItem(Entity); impl Item for ThreadViewItem { type Event = (); @@ -3590,11 +3654,39 @@ pub(crate) mod tests { fn connect( &self, - _root_dir: Option<&Path>, _delegate: AgentServerDelegate, _cx: &mut App, - ) -> Task, Option)>> { - Task::ready(Ok((Rc::new(self.connection.clone()), None))) + ) -> Task>> { + Task::ready(Ok(Rc::new(self.connection.clone()))) + } + + fn into_any(self: Rc) -> Rc { + self + } + } + + struct FailingAgentServer; + + impl AgentServer for FailingAgentServer { + fn logo(&self) -> ui::IconName { + ui::IconName::AiOpenAi + } + + fn name(&self) -> SharedString { + "Codex CLI".into() + } + + fn connect( + &self, + _delegate: AgentServerDelegate, + _cx: &mut App, + ) -> Task>> { + Task::ready(Err(anyhow!( + "extracting downloaded asset for \ + https://github.com/zed-industries/codex-acp/releases/download/v0.9.4/\ + codex-acp-0.9.4-aarch64-pc-windows-msvc.zip: \ + failed to iterate over archive: Invalid gzip header" + ))) } fn into_any(self: Rc) -> Rc { @@ -3621,6 +3713,102 @@ pub(crate) mod tests { ) -> Task> { Task::ready(Ok(AgentSessionListResponse::new(self.sessions.clone()))) } + + fn into_any(self: Rc) -> Rc { + self + } + } + + #[derive(Clone)] + struct SessionHistoryConnection { + sessions: Vec, + } + + impl SessionHistoryConnection { + fn new(sessions: Vec) -> Self { + Self { sessions } + } + } + + fn build_test_thread( + connection: Rc, + project: Entity, + name: &'static str, + session_id: SessionId, + cx: &mut App, + ) -> Entity { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + cx.new(|cx| { + AcpThread::new( + None, + name, + connection, + project, + action_log, + session_id, + watch::Receiver::constant( + acp::PromptCapabilities::new() + 
.image(true) + .audio(true) + .embedded_context(true), + ), + cx, + ) + }) + } + + impl AgentConnection for SessionHistoryConnection { + fn telemetry_id(&self) -> SharedString { + "history-connection".into() + } + + fn new_session( + self: Rc, + project: Entity, + _cwd: &Path, + cx: &mut App, + ) -> Task>> { + let thread = build_test_thread( + self, + project, + "SessionHistoryConnection", + SessionId::new("history-session"), + cx, + ); + Task::ready(Ok(thread)) + } + + fn supports_load_session(&self) -> bool { + true + } + + fn session_list(&self, _cx: &mut App) -> Option> { + Some(Rc::new(StubSessionList::new(self.sessions.clone()))) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] + } + + fn authenticate( + &self, + _method_id: acp::AuthMethodId, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(())) + } + + fn prompt( + &self, + _id: Option, + _params: acp::PromptRequest, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))) + } + + fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {} + fn into_any(self: Rc) -> Rc { self } @@ -3640,24 +3828,13 @@ pub(crate) mod tests { _cwd: &Path, cx: &mut gpui::App, ) -> Task>> { - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "ResumeOnlyAgentConnection", - self.clone(), - project, - action_log, - SessionId::new("new-session"), - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); + let thread = build_test_thread( + self, + project, + "ResumeOnlyAgentConnection", + SessionId::new("new-session"), + cx, + ); Task::ready(Ok(thread)) } @@ -3672,24 +3849,13 @@ pub(crate) mod tests { _cwd: &Path, cx: &mut App, ) -> Task>> { - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "ResumeOnlyAgentConnection", - self.clone(), - project, - 
action_log, - session.session_id, - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); + let thread = build_test_thread( + self, + project, + "ResumeOnlyAgentConnection", + session.session_id, + cx, + ); Task::ready(Ok(thread)) } @@ -4063,9 +4229,9 @@ pub(crate) mod tests { } fn active_thread( - thread_view: &Entity, + thread_view: &Entity, cx: &TestAppContext, - ) -> Entity { + ) -> Entity { cx.read(|cx| { thread_view .read(cx) @@ -4076,7 +4242,7 @@ pub(crate) mod tests { } fn message_editor( - thread_view: &Entity, + thread_view: &Entity, cx: &TestAppContext, ) -> Entity { let thread = active_thread(thread_view, cx); @@ -4102,12 +4268,12 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| AcpThreadHistory::new(None, window, cx))); + let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); let connection = Rc::new(StubAgentConnection::new()); let thread_view = cx.update(|window, cx| { cx.new(|cx| { - AcpServerView::new( + ConnectionView::new( Rc::new(StubAgentServer::new(connection.as_ref().clone())), None, None, @@ -4634,7 +4800,7 @@ pub(crate) mod tests { } struct GeneratingThreadSetup { - thread_view: Entity, + thread_view: Entity, thread: Entity, message_editor: Entity, } @@ -5597,182 +5763,6 @@ pub(crate) mod tests { }); } - #[gpui::test] - async fn test_granularity_selection_updates_state(cx: &mut TestAppContext) { - init_test(cx); - - let tool_call_id = acp::ToolCallId::new("granularity-test-1"); - let tool_call = - acp::ToolCall::new(tool_call_id.clone(), "Run `cargo build`").kind(acp::ToolKind::Edit); - - let permission_options = - ToolPermissionContext::new(TerminalTool::NAME, vec!["cargo build".to_string()]) - .build_permission_options(); - - let connection 
= - StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( - tool_call_id.clone(), - permission_options.clone(), - )])); - - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); - - let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; - add_to_workspace(thread_view.clone(), cx); - - cx.update(|_window, cx| { - AgentSettings::override_global( - AgentSettings { - notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let message_editor = message_editor(&thread_view, cx); - message_editor.update_in(cx, |editor, window, cx| { - editor.set_text("Build the project", window, cx); - }); - - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx)); - - cx.run_until_parked(); - - // Verify default granularity is the last option (index 2 = "Only this time") - thread_view.read_with(cx, |thread_view, cx| { - let state = thread_view.active_thread().unwrap(); - let selected = state - .read(cx) - .selected_permission_granularity - .get(&tool_call_id); - assert!( - selected.is_none(), - "Should have no selection initially (defaults to last)" - ); - }); - - // Select the first option (index 0 = "Always for terminal") - thread_view.update_in(cx, |_, window, cx| { - window.dispatch_action( - crate::SelectPermissionGranularity { - tool_call_id: "granularity-test-1".to_string(), - index: 0, - } - .boxed_clone(), - cx, - ); - }); - - cx.run_until_parked(); - - // Verify the selection was updated - thread_view.read_with(cx, |thread_view, cx| { - let state = thread_view.active_thread().unwrap(); - let selected = state - .read(cx) - .selected_permission_granularity - .get(&tool_call_id); - assert_eq!(selected, Some(&0), "Should have selected index 0"); - }); - } - - #[gpui::test] - async fn test_allow_button_uses_selected_granularity(cx: &mut TestAppContext) { - init_test(cx); - - let tool_call_id = 
acp::ToolCallId::new("allow-granularity-test-1"); - let tool_call = - acp::ToolCall::new(tool_call_id.clone(), "Run `npm install`").kind(acp::ToolKind::Edit); - - let permission_options = - ToolPermissionContext::new(TerminalTool::NAME, vec!["npm install".to_string()]) - .build_permission_options(); - - // Verify we have the expected options - let PermissionOptions::Dropdown(choices) = &permission_options else { - panic!("Expected dropdown permission options"); - }; - - assert_eq!(choices.len(), 3); - assert!( - choices[0] - .allow - .option_id - .0 - .contains("always_allow:terminal") - ); - assert!( - choices[1] - .allow - .option_id - .0 - .contains("always_allow_pattern:terminal") - ); - assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow"); - - let connection = - StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( - tool_call_id.clone(), - permission_options.clone(), - )])); - - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); - - let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; - add_to_workspace(thread_view.clone(), cx); - - cx.update(|_window, cx| { - AgentSettings::override_global( - AgentSettings { - notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let message_editor = message_editor(&thread_view, cx); - message_editor.update_in(cx, |editor, window, cx| { - editor.set_text("Install dependencies", window, cx); - }); - - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx)); - - cx.run_until_parked(); - - // Select the pattern option (index 1 = "Always for `npm` commands") - thread_view.update_in(cx, |_, window, cx| { - window.dispatch_action( - crate::SelectPermissionGranularity { - tool_call_id: "allow-granularity-test-1".to_string(), - index: 1, - } - .boxed_clone(), - cx, - ); - }); - - cx.run_until_parked(); - - // Simulate clicking the Allow 
button by dispatching AllowOnce action - // which should use the selected granularity - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| { - view.allow_once(&AllowOnce, window, cx) - }); - - cx.run_until_parked(); - - // Verify tool call was authorized - thread_view.read_with(cx, |thread_view, cx| { - let tool_call = thread_view.pending_tool_call(cx); - assert!( - tool_call.is_none(), - "Tool call should be authorized after Allow with pattern granularity" - ); - }); - } - #[gpui::test] async fn test_deny_button_uses_selected_granularity(cx: &mut TestAppContext) { init_test(cx); @@ -6251,4 +6241,86 @@ pub(crate) mod tests { assert_eq!(tool_call_id, acp::ToolCallId::new("tc-b")); }); } + + #[gpui::test] + async fn test_move_queued_message_to_empty_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + let (connection_view, cx) = + setup_thread_view(StubAgentServer::default_response(), cx).await; + + // Add a plain-text message to the queue directly. + active_thread(&connection_view, cx).update_in(cx, |thread, window, cx| { + thread.add_to_queue( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "queued message".to_string(), + ))], + vec![], + cx, + ); + // Main editor must be empty for this path — it is by default, but + // assert to make the precondition explicit. + assert!(thread.message_editor.read(cx).is_empty(cx)); + thread.move_queued_message_to_main_editor(0, None, window, cx); + }); + + cx.run_until_parked(); + + // Queue should now be empty. + let queue_len = active_thread(&connection_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0, "Queue should be empty after move"); + + // Main editor should contain the queued message text. 
+ let text = message_editor(&connection_view, cx).update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "queued message", + "Main editor should contain the moved queued message" + ); + } + + #[gpui::test] + async fn test_move_queued_message_to_non_empty_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + let (connection_view, cx) = + setup_thread_view(StubAgentServer::default_response(), cx).await; + + // Seed the main editor with existing content. + message_editor(&connection_view, cx).update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "existing content".to_string(), + ))], + window, + cx, + ); + }); + + // Add a plain-text message to the queue. + active_thread(&connection_view, cx).update_in(cx, |thread, window, cx| { + thread.add_to_queue( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "queued message".to_string(), + ))], + vec![], + cx, + ); + thread.move_queued_message_to_main_editor(0, None, window, cx); + }); + + cx.run_until_parked(); + + // Queue should now be empty. + let queue_len = active_thread(&connection_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0, "Queue should be empty after move"); + + // Main editor should contain existing content + separator + queued content. 
+ let text = message_editor(&connection_view, cx).update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "existing content\n\nqueued message", + "Main editor should have existing content and queued message separated by two newlines" + ); + } } diff --git a/crates/agent_ui/src/acp/thread_view/active_thread.rs b/crates/agent_ui/src/connection_view/thread_view.rs similarity index 87% rename from crates/agent_ui/src/acp/thread_view/active_thread.rs rename to crates/agent_ui/src/connection_view/thread_view.rs index aa1a11ee2f65100d5bfa3c06801a98be16419af9..64a0f61345b1a48dcfec5229d5e699fed8fee2bd 100644 --- a/crates/agent_ui/src/acp/thread_view/active_thread.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1,8 +1,13 @@ +use acp_thread::ContentBlock; use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody}; +use editor::actions::OpenExcerpts; + +use crate::StartThreadIn; use gpui::{Corner, List}; -use language_model::LanguageModelEffortLevel; +use language_model::{LanguageModelEffortLevel, Speed}; use settings::update_settings_file; use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab}; +use workspace::SERIALIZATION_THROTTLE_TIME; use super::*; @@ -43,6 +48,7 @@ impl ThreadFeedbackState { } } let session_id = thread.read(cx).session_id().clone(); + let parent_session_id = thread.read(cx).parent_session_id().cloned(); let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); let task = telemetry.thread_data(&session_id, cx); let rating = match feedback { @@ -58,6 +64,7 @@ impl ThreadFeedbackState { organization_id: organization.map(|organization| organization.id.clone()), agent: agent_telemetry_id.to_string(), session_id: session_id.to_string(), + parent_session_id: parent_session_id.map(|id| id.to_string()), rating: rating.to_string(), thread, }) @@ -186,14 +193,20 @@ impl DiffStats { } } -pub struct AcpThreadView { +pub enum AcpThreadViewEvent { + FirstSendRequested { content: Vec }, +} + +impl 
EventEmitter for ThreadView {} + +pub struct ThreadView { pub id: acp::SessionId, pub parent_id: Option, - pub login: Option, // is some <=> Active | Unauthenticated pub thread: Entity, pub(crate) conversation: Entity, - pub server_view: WeakEntity, + pub server_view: WeakEntity, pub agent_icon: IconName, + pub agent_icon_from_external_svg: Option, pub agent_name: SharedString, pub focus_handle: FocusHandle, pub workspace: WeakEntity, @@ -201,7 +214,7 @@ pub struct AcpThreadView { pub title_editor: Entity, pub config_options_view: Option>, pub mode_selector: Option>, - pub model_selector: Option>, + pub model_selector: Option>, pub profile_selector: Option>, pub permission_dropdown_handle: PopoverMenuHandle, pub thread_retry_status: Option, @@ -234,12 +247,10 @@ pub struct AcpThreadView { pub is_loading_contents: bool, pub new_server_version_available: Option, pub resumed_without_history: bool, - /// Tracks the selected granularity index for each tool call's permission dropdown. - /// The index corresponds to the position in the allow_options list. - /// Default is the last option (index pointing to "Only this time"). 
- pub selected_permission_granularity: HashMap, pub resume_thread_metadata: Option, pub _cancel_task: Option>, + _save_task: Option>, + _draft_resolve_task: Option>, pub skip_queue_processing_count: usize, pub user_interrupted_generation: bool, pub can_fast_track_queue: bool, @@ -253,10 +264,10 @@ pub struct AcpThreadView { pub recent_history_entries: Vec, pub hovered_recent_history_item: Option, pub show_codex_windows_warning: bool, - pub history: Entity, + pub history: Entity, pub _history_subscription: Subscription, } -impl Focusable for AcpThreadView { +impl Focusable for ThreadView { fn focus_handle(&self, cx: &App) -> FocusHandle { if self.parent_id.is_some() { self.focus_handle.clone() @@ -276,21 +287,21 @@ pub struct TurnFields { pub turn_tokens: Option, } -impl AcpThreadView { +impl ThreadView { pub(crate) fn new( parent_id: Option, thread: Entity, conversation: Entity, - login: Option, - server_view: WeakEntity, + server_view: WeakEntity, agent_icon: IconName, + agent_icon_from_external_svg: Option, agent_name: SharedString, agent_display_name: SharedString, workspace: WeakEntity, entry_view_state: Entity, config_options_view: Option>, mode_selector: Option>, - model_selector: Option>, + model_selector: Option>, profile_selector: Option>, list_state: ListState, prompt_capabilities: Rc>, @@ -299,7 +310,7 @@ impl AcpThreadView { resume_thread_metadata: Option, project: WeakEntity, thread_store: Option>, - history: Entity, + history: Entity, prompt_store: Option>, initial_content: Option, mut subscriptions: Vec, @@ -347,6 +358,8 @@ impl AcpThreadView { editor.set_message(blocks, window, cx); } } + } else if let Some(draft) = thread.read(cx).draft_prompt() { + editor.set_message(draft.to_vec(), window, cx); } editor }); @@ -379,6 +392,30 @@ impl AcpThreadView { Self::handle_message_editor_event, )); + subscriptions.push(cx.observe(&message_editor, |this, editor, cx| { + let is_empty = editor.read(cx).text(cx).is_empty(); + let draft_contents_task = if 
is_empty { + None + } else { + Some(editor.update(cx, |editor, cx| editor.draft_contents(cx))) + }; + this._draft_resolve_task = Some(cx.spawn(async move |this, cx| { + let draft = if let Some(task) = draft_contents_task { + let blocks = task.await.ok().filter(|b| !b.is_empty()); + blocks + } else { + None + }; + this.update(cx, |this, cx| { + this.thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(draft); + }); + this.schedule_save(cx); + }) + .ok(); + })); + })); + let recent_history_entries = history.read(cx).get_recent_sessions(3); let mut this = Self { @@ -387,9 +424,9 @@ impl AcpThreadView { focus_handle: cx.focus_handle(), thread, conversation, - login, server_view, agent_icon, + agent_icon_from_external_svg, agent_name, workspace, entry_view_state, @@ -429,8 +466,9 @@ impl AcpThreadView { discarded_partial_edits: HashSet::default(), is_loading_contents: false, new_server_version_available: None, - selected_permission_granularity: HashMap::default(), _cancel_task: None, + _save_task: None, + _draft_resolve_task: None, skip_queue_processing_count: 0, user_interrupted_generation: false, can_fast_track_queue: false, @@ -446,12 +484,50 @@ impl AcpThreadView { _history_subscription: history_subscription, show_codex_windows_warning, }; + let list_state_for_scroll = this.list_state.clone(); + let thread_view = cx.entity().downgrade(); + this.list_state + .set_scroll_handler(move |_event, _window, cx| { + let list_state = list_state_for_scroll.clone(); + let thread_view = thread_view.clone(); + // N.B. We must defer because the scroll handler is called while the + // ListState's RefCell is mutably borrowed. Reading logical_scroll_top() + // directly would panic from a double borrow. 
+ cx.defer(move |cx| { + let scroll_top = list_state.logical_scroll_top(); + let _ = thread_view.update(cx, |this, cx| { + if let Some(thread) = this.as_native_thread(cx) { + thread.update(cx, |thread, _cx| { + thread.set_ui_scroll_position(Some(scroll_top)); + }); + } + this.schedule_save(cx); + }); + }); + }); + if should_auto_submit { this.send(window, cx); } this } + /// Schedule a throttled save of the thread state (draft prompt, scroll position, etc.). + /// Multiple calls within `SERIALIZATION_THROTTLE_TIME` are coalesced into a single save. + fn schedule_save(&mut self, cx: &mut Context) { + self._save_task = Some(cx.spawn(async move |this, cx| { + cx.background_executor() + .timer(SERIALIZATION_THROTTLE_TIME) + .await; + this.update(cx, |this, cx| { + if let Some(thread) = this.as_native_thread(cx) { + thread.update(cx, |_thread, cx| cx.notify()); + } + }) + .ok(); + })); + } + pub fn handle_message_editor_event( &mut self, _editor: &Entity, @@ -467,6 +543,7 @@ impl AcpThreadView { self.cancel_editing(&Default::default(), window, cx); } MessageEditorEvent::LostFocus => {} + MessageEditorEvent::InputAttempted(_) => {} } } @@ -484,6 +561,24 @@ impl AcpThreadView { .thread(acp_thread.session_id(), cx) } + /// Resolves the message editor's contents into content blocks. For profiles + /// that do not enable any tools, directory mentions are expanded to inline + /// file contents since the agent can't read files on its own. 
+ fn resolve_message_contents( + &self, + message_editor: &Entity, + cx: &mut App, + ) -> Task, Vec>)>> { + let expand = self.as_native_thread(cx).is_some_and(|thread| { + let thread = thread.read(cx); + AgentSettings::get_global(cx) + .profiles + .get(thread.profile()) + .is_some_and(|profile| profile.tools.is_empty()) + }); + message_editor.update(cx, |message_editor, cx| message_editor.contents(expand, cx)) + } + pub fn current_model_id(&self, cx: &App) -> Option { let selector = self.model_selector.as_ref()?; let model = selector.read(cx).active_model(cx)?; @@ -581,9 +676,71 @@ impl AcpThreadView { ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Cancel) => { self.cancel_editing(&Default::default(), window, cx); } + ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::InputAttempted(_)) => {} + ViewEvent::OpenDiffLocation { + path, + position, + split, + } => { + self.open_diff_location(path, *position, *split, window, cx); + } } } + fn open_diff_location( + &self, + path: &str, + position: Point, + split: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some(project) = self.project.upgrade() else { + return; + }; + let Some(project_path) = project.read(cx).find_project_path(path, cx) else { + return; + }; + + let open_task = if split { + self.workspace + .update(cx, |workspace, cx| { + workspace.split_path(project_path, window, cx) + }) + .log_err() + } else { + self.workspace + .update(cx, |workspace, cx| { + workspace.open_path(project_path, None, true, window, cx) + }) + .log_err() + }; + + let Some(open_task) = open_task else { + return; + }; + + window + .spawn(cx, async move |cx| { + let item = open_task.await?; + let Some(editor) = item.downcast::() else { + return anyhow::Ok(()); + }; + editor.update_in(cx, |editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |selections| { + selections.select_ranges([position..position]); + }, + ); + })?; + anyhow::Ok(()) + 
}) + .detach_and_log_err(cx); + } + // turns pub fn start_turn(&mut self, cx: &mut Context) -> usize { @@ -635,6 +792,46 @@ impl AcpThreadView { } let message_editor = self.message_editor.clone(); + + // Intercept the first send so the agent panel can capture the full + // content blocks — needed for "Start thread in New Worktree", + // which must create a workspace before sending the message there. + let intercept_first_send = self.thread.read(cx).entries().is_empty() + && !message_editor.read(cx).is_empty(cx) + && self + .workspace + .upgrade() + .and_then(|workspace| workspace.read(cx).panel::(cx)) + .is_some_and(|panel| { + panel.read(cx).start_thread_in() == &StartThreadIn::NewWorktree + }); + + if intercept_first_send { + let content_task = self.resolve_message_contents(&message_editor, cx); + + cx.spawn(async move |this, cx| match content_task.await { + Ok((content, _tracked_buffers)) => { + if content.is_empty() { + return; + } + + this.update(cx, |_, cx| { + cx.emit(AcpThreadViewEvent::FirstSendRequested { content }); + }) + .ok(); + } + Err(error) => { + this.update(cx, |this, cx| { + this.handle_thread_error(error, cx); + }) + .ok(); + } + }) + .detach(); + + return; + } + let is_editor_empty = message_editor.read(cx).is_empty(cx); let is_generating = thread.read(cx).status() != ThreadStatus::Idle; @@ -658,7 +855,7 @@ impl AcpThreadView { let text = text.trim(); if text == "/login" || text == "/logout" { let connection = thread.read(cx).connection().clone(); - let can_login = !connection.auth_methods().is_empty() || self.login.is_some(); + let can_login = !connection.auth_methods().is_empty(); // Does the agent have a specific logout command? Prefer that in case they need to reset internal state. 
let logout_supported = text == "/logout" && self @@ -674,7 +871,7 @@ impl AcpThreadView { let agent_name = self.agent_name.clone(); let server_view = self.server_view.clone(); move |window, cx| { - AcpServerView::handle_auth_required( + ConnectionView::handle_auth_required( server_view.clone(), AuthRequired::new(), agent_name, @@ -698,18 +895,7 @@ impl AcpThreadView { window: &mut Window, cx: &mut Context, ) { - let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| { - // Include full contents when using minimal profile - let thread = thread.read(cx); - AgentSettings::get_global(cx) - .profiles - .get(thread.profile()) - .is_some_and(|profile| profile.tools.is_empty()) - }); - - let contents = message_editor.update(cx, |message_editor, cx| { - message_editor.contents(full_mention_content, cx) - }); + let contents = self.resolve_message_contents(&message_editor, cx); self.thread_error.take(); self.thread_feedback.clear(); @@ -749,7 +935,9 @@ impl AcpThreadView { cx: &mut Context, ) { let session_id = self.thread.read(cx).session_id().clone(); + let parent_session_id = self.thread.read(cx).parent_session_id().cloned(); let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id(); + let is_first_message = self.thread.read(cx).entries().is_empty(); let thread = self.thread.downgrade(); self.is_loading_contents = true; @@ -790,6 +978,25 @@ impl AcpThreadView { .ok(); } }); + if is_first_message { + let text: String = contents + .iter() + .filter_map(|block| match block { + acp::ContentBlock::Text(text_content) => Some(text_content.text.as_str()), + _ => None, + }) + .collect::>() + .join(" "); + let text = text.lines().next().unwrap_or("").trim(); + if !text.is_empty() { + let title: SharedString = util::truncate_and_trailoff(text, 20).into(); + thread + .update(cx, |thread, cx| thread.set_title(title, cx))? 
+ .await + .log_err(); + } + } + let turn_start_time = Instant::now(); let send = thread.update(cx, |thread, cx| { thread.action_log().update(cx, |action_log, cx| { @@ -803,6 +1010,7 @@ impl AcpThreadView { "Agent Message Sent", agent = agent_telemetry_id, session = session_id, + parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()), model = model_id, mode = mode_id ); @@ -822,6 +1030,7 @@ impl AcpThreadView { "Agent Turn Completed", agent = agent_telemetry_id, session = session_id, + parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()), model = model_id, mode = mode_id, status, @@ -833,7 +1042,7 @@ impl AcpThreadView { cx.spawn(async move |this, cx| { if let Err(err) = task.await { this.update(cx, |this, cx| { - this.handle_any_thread_error(err, cx); + this.handle_thread_error(err, cx); }) .ok(); } else { @@ -891,12 +1100,12 @@ impl AcpThreadView { .detach(); } - pub(crate) fn handle_any_thread_error(&mut self, error: anyhow::Error, cx: &mut Context) { - let error = ThreadError::from_err(error, &self.agent_name); - self.handle_thread_error(error, cx); - } - - pub(crate) fn handle_thread_error(&mut self, error: ThreadError, cx: &mut Context) { + pub(crate) fn handle_thread_error( + &mut self, + error: impl Into, + cx: &mut Context, + ) { + let error = error.into(); self.emit_thread_error_telemetry(&error, cx); self.thread_error = Some(error); cx.notify(); @@ -930,11 +1139,17 @@ impl AcpThreadView { let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id(); let session_id = self.thread.read(cx).session_id().clone(); + let parent_session_id = self + .thread + .read(cx) + .parent_session_id() + .map(|id| id.to_string()); telemetry::event!( "Agent Panel Error Shown", agent = agent_telemetry_id, session_id = session_id, + parent_session_id = parent_session_id, kind = error_kind, acp_error_code = acp_error_code, message = message, @@ -964,7 +1179,7 @@ impl AcpThreadView { this.update(cx, |this, cx| { if let Err(err) = 
result { - this.handle_any_thread_error(err, cx); + this.handle_thread_error(err, cx); } }) }) @@ -1035,21 +1250,11 @@ impl AcpThreadView { let is_idle = self.thread.read(cx).status() == acp_thread::ThreadStatus::Idle; if is_idle { - self.send_impl(message_editor.clone(), window, cx); + self.send_impl(message_editor, window, cx); return; } - let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| { - let thread = thread.read(cx); - AgentSettings::get_global(cx) - .profiles - .get(thread.profile()) - .is_some_and(|profile| profile.tools.is_empty()) - }); - - let contents = message_editor.update(cx, |message_editor, cx| { - message_editor.contents(full_mention_content, cx) - }); + let contents = self.resolve_message_contents(&message_editor, cx); cx.spawn_in(window, async move |this, cx| { let (content, tracked_buffers) = contents.await?; @@ -1151,6 +1356,44 @@ impl AcpThreadView { self.send_content(contents_task, window, cx); } + pub fn move_queued_message_to_main_editor( + &mut self, + index: usize, + inserted_text: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) -> bool { + let Some(queued_message) = self.remove_from_queue(index, cx) else { + return false; + }; + let queued_content = queued_message.content; + let message_editor = self.message_editor.clone(); + let inserted_text = inserted_text.map(ToOwned::to_owned); + + window.focus(&message_editor.focus_handle(cx), cx); + + if message_editor.read(cx).is_empty(cx) { + message_editor.update(cx, |editor, cx| { + editor.set_message(queued_content, window, cx); + if let Some(inserted_text) = inserted_text.as_deref() { + editor.insert_text(inserted_text, window, cx); + } + }); + cx.notify(); + return true; + } + + message_editor.update(cx, |editor, cx| { + editor.append_message(queued_content, Some("\n\n"), window, cx); + if let Some(inserted_text) = inserted_text.as_deref() { + editor.insert_text(inserted_text, window, cx); + } + }); + + cx.notify(); + true + } + // editor methods pub 
fn expand_message_editor( @@ -1325,19 +1568,6 @@ impl AcpThreadView { ); } - pub fn handle_select_permission_granularity( - &mut self, - action: &SelectPermissionGranularity, - _window: &mut Window, - cx: &mut Context, - ) { - let tool_call_id = acp::ToolCallId::new(action.tool_call_id.clone()); - self.selected_permission_granularity - .insert(tool_call_id, action.index); - - cx.notify(); - } - fn authorize_pending_with_granularity( &mut self, is_allow: bool, @@ -1357,9 +1587,9 @@ impl AcpThreadView { // Get selected index, defaulting to last option ("Only this time") let selected_index = self - .selected_permission_granularity - .get(&tool_call_id) - .copied() + .conversation + .read(cx) + .selected_permission_granularity(&session_id, &tool_call_id) .unwrap_or_else(|| choices.len().saturating_sub(1)); let selected_choice = choices.get(selected_index).or(choices.last())?; @@ -1507,7 +1737,7 @@ impl AcpThreadView { pub fn sync_thread( &mut self, project: Entity, - server_view: Entity, + server_view: Entity, window: &mut Window, cx: &mut Context, ) { @@ -1539,7 +1769,7 @@ impl AcpThreadView { thread_store .update(&mut cx.clone(), |store, cx| { - store.save_thread(session_id.clone(), db_thread, cx) + store.save_thread(session_id.clone(), db_thread, Default::default(), cx) }) .await?; @@ -1757,23 +1987,26 @@ impl AcpThreadView { .when(!plan.is_empty() && !changed_buffers.is_empty(), |this| { this.child(Divider::horizontal().color(DividerColor::Border)) }) - .when(!changed_buffers.is_empty(), |this| { - this.child(self.render_edits_summary( - &changed_buffers, - edits_expanded, - pending_edits, - cx, - )) - .when(edits_expanded, |parent| { - parent.child(self.render_edited_files( - action_log, - telemetry.clone(), + .when( + !changed_buffers.is_empty() && thread.parent_session_id().is_none(), + |this| { + this.child(self.render_edits_summary( &changed_buffers, + edits_expanded, pending_edits, cx, )) - }) - }) + .when(edits_expanded, |parent| { + 
parent.child(self.render_edited_files( + action_log, + telemetry.clone(), + &changed_buffers, + pending_edits, + cx, + )) + }) + }, + ) .when(!queue_is_empty, |this| { this.when(!plan.is_empty() || !changed_buffers.is_empty(), |this| { this.child(Divider::horizontal().color(DividerColor::Border)) @@ -2529,6 +2762,7 @@ impl AcpThreadView { .gap_0p5() .child(self.render_add_context_button(cx)) .child(self.render_follow_toggle(cx)) + .children(self.render_fast_mode_control(cx)) .children(self.render_thinking_control(cx)), ) .child( @@ -2606,50 +2840,24 @@ impl AcpThreadView { .child(if editor_focused { h_flex() .gap_1() - .min_w_40() - .child( - IconButton::new(("cancel_edit", index), IconName::Close) - .icon_size(IconSize::Small) - .icon_color(Color::Error) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |_window, cx| { - Tooltip::for_action_in( - "Cancel Edit", - &editor::actions::Cancel, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let main_editor = self.message_editor.clone(); - cx.listener(move |_, _, window, cx| { - window.focus(&main_editor.focus_handle(cx), cx); - }) - }), - ) + .min_w(rems_from_px(150.)) + .justify_end() .child( - IconButton::new(("save_edit", index), IconName::Check) + IconButton::new(("edit", index), IconName::Pencil) .icon_size(IconSize::Small) - .icon_color(Color::Success) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |_window, cx| { - Tooltip::for_action_in( - "Save Edit", - &Chat, - &focus_handle, - cx, - ) - } + .tooltip(|_window, cx| { + Tooltip::with_meta( + "Edit Queued Message", + None, + "Type anything to edit", + cx, + ) }) - .on_click({ - let main_editor = self.message_editor.clone(); - cx.listener(move |_, _, window, cx| { - window.focus(&main_editor.focus_handle(cx), cx); - }) - }), + .on_click(cx.listener(move |this, _, window, cx| { + this.move_queued_message_to_main_editor( + index, None, window, cx, + ); + })), ) .child( Button::new(("send_now_focused", index), "Send Now") @@ 
-2671,62 +2879,64 @@ impl AcpThreadView { ) } else { h_flex() - .gap_1() .when(!is_next, |this| this.visible_on_hover("queue_entry")) + .gap_1() + .min_w(rems_from_px(150.)) + .justify_end() .child( - IconButton::new(("edit", index), IconName::Pencil) + IconButton::new(("delete", index), IconName::Trash) .icon_size(IconSize::Small) .tooltip({ let focus_handle = focus_handle.clone(); move |_window, cx| { if is_next { Tooltip::for_action_in( - "Edit", - &EditFirstQueuedMessage, + "Remove Message from Queue", + &RemoveFirstQueuedMessage, &focus_handle, cx, ) } else { - Tooltip::simple("Edit", cx) + Tooltip::simple( + "Remove Message from Queue", + cx, + ) } } }) - .on_click({ - let editor = editor.clone(); - cx.listener(move |_, _, window, cx| { - window.focus(&editor.focus_handle(cx), cx); - }) - }), + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_from_queue(index, cx); + cx.notify(); + })), ) .child( - IconButton::new(("delete", index), IconName::Trash) + IconButton::new(("edit", index), IconName::Pencil) .icon_size(IconSize::Small) .tooltip({ let focus_handle = focus_handle.clone(); move |_window, cx| { if is_next { Tooltip::for_action_in( - "Remove Message from Queue", - &RemoveFirstQueuedMessage, + "Edit", + &EditFirstQueuedMessage, &focus_handle, cx, ) } else { - Tooltip::simple( - "Remove Message from Queue", - cx, - ) + Tooltip::simple("Edit", cx) } } }) - .on_click(cx.listener(move |this, _, _, cx| { - this.remove_from_queue(index, cx); - cx.notify(); + .on_click(cx.listener(move |this, _, window, cx| { + this.move_queued_message_to_main_editor( + index, None, window, cx, + ); })), ) .child( Button::new(("send_now", index), "Send Now") .label_size(LabelSize::Small) + .when(is_next, |this| this.style(ButtonStyle::Outlined)) .when(is_next && message_editor.is_empty(cx), |this| { let action: Box = if can_fast_track { @@ -2735,7 +2945,7 @@ impl AcpThreadView { Box::new(SendNextQueuedMessage) }; - this.style(ButtonStyle::Outlined).key_binding( + 
this.key_binding( KeyBinding::for_action_in( action.as_ref(), &focus_handle.clone(), @@ -2744,9 +2954,6 @@ impl AcpThreadView { .map(|kb| kb.size(keybinding_size)), ) }) - .when(is_next && !message_editor.is_empty(cx), |this| { - this.style(ButtonStyle::Outlined) - }) .on_click(cx.listener(move |this, _, window, cx| { this.send_queued_message_at_index( index, true, window, cx, @@ -2953,26 +3160,69 @@ impl AcpThreadView { } } - fn render_thinking_control(&self, cx: &mut Context) -> Option { - let thread = self.as_native_thread(cx)?.read(cx); - let model = thread.model()?; + fn fast_mode_available(&self, cx: &Context) -> bool { + if !cx.is_staff() { + return false; + } + self.as_native_thread(cx) + .and_then(|thread| thread.read(cx).model()) + .map(|model| model.supports_fast_mode()) + .unwrap_or(false) + } - let supports_thinking = model.supports_thinking(); - if !supports_thinking { + fn render_fast_mode_control(&self, cx: &mut Context) -> Option { + if !self.fast_mode_available(cx) { return None; } - let thinking = thread.thinking_enabled(); + let thread = self.as_native_thread(cx)?.read(cx); - let (tooltip_label, icon, color) = if thinking { - ( - "Disable Thinking Mode", - IconName::ThinkingMode, - Color::Muted, - ) + let (tooltip_label, color, icon) = if matches!(thread.speed(), Some(Speed::Fast)) { + ("Disable Fast Mode", Color::Muted, IconName::FastForward) } else { ( - "Enable Thinking Mode", + "Enable Fast Mode", + Color::Custom(cx.theme().colors().icon_disabled.opacity(0.8)), + IconName::FastForwardOff, + ) + }; + + let focus_handle = self.message_editor.focus_handle(cx); + + Some( + IconButton::new("fast-mode", icon) + .icon_size(IconSize::Small) + .icon_color(color) + .tooltip(move |_, cx| { + Tooltip::for_action_in(tooltip_label, &ToggleFastMode, &focus_handle, cx) + }) + .on_click(cx.listener(move |this, _, _window, cx| { + this.toggle_fast_mode(cx); + })) + .into_any_element(), + ) + } + + fn render_thinking_control(&self, cx: &mut Context) -> Option 
{ + let thread = self.as_native_thread(cx)?.read(cx); + let model = thread.model()?; + + let supports_thinking = model.supports_thinking(); + if !supports_thinking { + return None; + } + + let thinking = thread.thinking_enabled(); + + let (tooltip_label, icon, color) = if thinking { + ( + "Disable Thinking Mode", + IconName::ThinkingMode, + Color::Muted, + ) + } else { + ( + "Enable Thinking Mode", IconName::ThinkingModeOff, Color::Custom(cx.theme().colors().icon_disabled.opacity(0.8)), ) @@ -3181,7 +3431,12 @@ impl AcpThreadView { .on_click(cx.listener(|this, _event, _, cx| this.cancel_generation(cx))) .into_any_element() } else { - IconButton::new("send-message", IconName::Send) + let send_icon = if is_generating { + IconName::QueueMessage + } else { + IconName::Send + }; + IconButton::new("send-message", send_icon) .style(ButtonStyle::Filled) .map(|this| { if is_editor_empty && !is_generating { @@ -3432,7 +3687,7 @@ impl AcpThreadView { } } -impl AcpThreadView { +impl ThreadView { pub(crate) fn render_entries(&mut self, cx: &mut Context) -> List { list( self.list_state.clone(), @@ -3670,6 +3925,7 @@ impl AcpThreadView { AgentThreadEntry::AssistantMessage(AssistantMessage { chunks, indented: _, + is_subagent_output: _, }) => { let mut is_blank = true; let is_last = entry_ix + 1 == total_entries; @@ -3736,12 +3992,49 @@ impl AcpThreadView { entry_ix, tool_call, &self.focus_handle(cx), + false, window, cx, ) .into_any(), }; + let is_subagent_output = self.is_subagent() + && matches!(entry, AgentThreadEntry::AssistantMessage(msg) if msg.is_subagent_output); + + let primary = if is_subagent_output { + v_flex() + .w_full() + .child( + h_flex() + .id("subagent_output") + .px_5() + .py_1() + .gap_2() + .child(Divider::horizontal()) + .child( + h_flex() + .gap_1() + .child( + Icon::new(IconName::ForwardArrowUp) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child( + Label::new("Subagent Output") + .size(LabelSize::Custom(self.tool_name_font_size())) + 
.color(Color::Muted), + ), + ) + .child(Divider::horizontal()) + .tooltip(Tooltip::text("Everything below this line was sent as output from this subagent to the main agent.")), + ) + .child(primary) + .into_any_element() + } else { + primary + }; + let primary = if is_indented { let line_top = if is_first_indented { rems_from_px(-12.0) @@ -3781,7 +4074,7 @@ impl AcpThreadView { let thread = self.thread.clone(); let comments_editor = self.thread_feedback.comments_editor.clone(); - let primary = if entry_ix == total_entries - 1 { + let primary = if entry_ix + 1 == total_entries { v_flex() .w_full() .child(primary) @@ -4169,6 +4462,8 @@ impl AcpThreadView { }) .flatten(); + let is_blocked_on_terminal_command = + !confirmation && self.is_blocked_on_terminal_command(cx); let is_waiting = confirmation || self.thread.read(cx).has_in_progress_tool_calls(); let turn_tokens_label = elapsed_label @@ -4206,6 +4501,8 @@ impl AcpThreadView { .color(Color::Muted), ), ) + } else if is_blocked_on_terminal_command { + this } else { this.child(SpinnerLabel::new().size(LabelSize::Small)) } @@ -4494,51 +4791,74 @@ impl AcpThreadView { if text.is_empty() { None } else { Some(text) } } + fn is_blocked_on_terminal_command(&self, cx: &App) -> bool { + let thread = self.thread.read(cx); + if !matches!(thread.status(), ThreadStatus::Generating) { + return false; + } + + let mut has_running_terminal_call = false; + + for entry in thread.entries().iter().rev() { + match entry { + AgentThreadEntry::UserMessage(_) => break, + AgentThreadEntry::ToolCall(tool_call) + if matches!( + tool_call.status, + ToolCallStatus::InProgress | ToolCallStatus::Pending + ) => + { + if matches!(tool_call.kind, acp::ToolKind::Execute) { + has_running_terminal_call = true; + } else { + return false; + } + } + AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {} + } + } + + has_running_terminal_call + } + fn render_collapsible_command( &self, + group: SharedString, is_preview: bool, 
command_source: &str, - tool_call_id: &acp::ToolCallId, cx: &Context, ) -> Div { - let command_group = - SharedString::from(format!("collapsible-command-group-{}", tool_call_id)); - v_flex() - .group(command_group.clone()) + .p_1p5() .bg(self.tool_card_header_bg(cx)) - .child( - v_flex() - .p_1p5() - .when(is_preview, |this| { - this.pt_1().child( - // Wrapping this label on a container with 24px height to avoid - // layout shift when it changes from being a preview label - // to the actual path where the command will run in - h_flex().h_6().child( - Label::new("Run Command") - .buffer_font(cx) - .size(LabelSize::XSmall) - .color(Color::Muted), - ), - ) - }) - .children(command_source.lines().map(|line| { - let text: SharedString = if line.is_empty() { - " ".into() - } else { - line.to_string().into() - }; - - Label::new(text).buffer_font(cx).size(LabelSize::Small) - })) - .child( - div().absolute().top_1().right_1().child( - CopyButton::new("copy-command", command_source.to_string()) - .tooltip_label("Copy Command") - .visible_on_hover(command_group), - ), + .when(is_preview, |this| { + this.pt_1().child( + // Wrapping this label on a container with 24px height to avoid + // layout shift when it changes from being a preview label + // to the actual path where the command will run in + h_flex().h_6().child( + Label::new("Run Command") + .buffer_font(cx) + .size(LabelSize::XSmall) + .color(Color::Muted), ), + ) + }) + .children(command_source.lines().map(|line| { + let text: SharedString = if line.is_empty() { + " ".into() + } else { + line.to_string().into() + }; + + Label::new(text).buffer_font(cx).size(LabelSize::Small) + })) + .child( + div().absolute().top_1().right_1().child( + CopyButton::new("copy-command", command_source.to_string()) + .tooltip_label("Copy Command") + .visible_on_hover(group), + ), ) } @@ -4549,6 +4869,7 @@ impl AcpThreadView { terminal: &Entity, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: 
&Context, ) -> AnyElement { @@ -4569,7 +4890,11 @@ impl AcpThreadView { let needs_confirmation = confirmation_options.is_some(); let output = terminal_data.output(); - let command_finished = output.is_some(); + let command_finished = output.is_some() + && !matches!( + tool_call.status, + ToolCallStatus::InProgress | ToolCallStatus::Pending + ); let truncated_output = output.is_some_and(|output| output.original_content_len > output.content.len()); let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0); @@ -4611,14 +4936,15 @@ impl AcpThreadView { .unwrap_or(&command_source); let command_element = - self.render_collapsible_command(false, command_content, &tool_call.id, cx); + self.render_collapsible_command(header_group.clone(), false, command_content, cx); let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); let header = h_flex() .id(header_id) - .px_1p5() .pt_1() + .pl_1p5() + .pr_1() .flex_none() .gap_1() .justify_between() @@ -4636,19 +4962,54 @@ impl AcpThreadView { .color(Color::Muted), ), ) + .child( + Disclosure::new( + SharedString::from(format!( + "terminal-tool-disclosure-{}", + terminal.entity_id() + )), + is_expanded, + ) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .visible_on_hover(&header_group) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this, _event, _window, cx| { + if is_expanded { + this.expanded_tool_calls.remove(&id); + } else { + this.expanded_tool_calls.insert(id.clone()); + } + cx.notify(); + } + })), + ) + .when(time_elapsed > Duration::from_secs(10), |header| { + header.child( + Label::new(format!("({})", duration_alt_display(time_elapsed))) + .buffer_font(cx) + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + }) .when(!command_finished && !needs_confirmation, |header| { header .gap_1p5() .child( - Button::new( + Icon::new(IconName::ArrowCircle) + .size(IconSize::XSmall) + .color(Color::Muted) + .with_rotate_animation(2) + ) + 
.child(div().h(relative(0.6)).ml_1p5().child(Divider::vertical().color(DividerColor::Border))) + .child( + IconButton::new( SharedString::from(format!("stop-terminal-{}", terminal.entity_id())), - "Stop", + IconName::Stop ) - .icon(IconName::Stop) - .icon_position(IconPosition::Start) .icon_size(IconSize::Small) .icon_color(Color::Error) - .label_size(LabelSize::Small) .tooltip(move |_window, cx| { Tooltip::with_meta( "Stop This Command", @@ -4669,13 +5030,6 @@ impl AcpThreadView { }) }), ) - .child(Divider::vertical()) - .child( - Icon::new(IconName::ArrowCircle) - .size(IconSize::XSmall) - .color(Color::Info) - .with_rotate_animation(2) - ) }) .when(truncated_output, |header| { let tooltip = if let Some(output) = output { @@ -4711,14 +5065,6 @@ impl AcpThreadView { .tooltip(Tooltip::text(tooltip)), ) }) - .when(time_elapsed > Duration::from_secs(10), |header| { - header.child( - Label::new(format!("({})", duration_alt_display(time_elapsed))) - .buffer_font(cx) - .color(Color::Muted) - .size(LabelSize::XSmall), - ) - }) .when(tool_failed || command_failed, |header| { header.child( div() @@ -4736,29 +5082,7 @@ impl AcpThreadView { }), ) }) - .child( - Disclosure::new( - SharedString::from(format!( - "terminal-tool-disclosure-{}", - terminal.entity_id() - )), - is_expanded, - ) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .visible_on_hover(&header_group) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move |this, _event, _window, cx| { - if is_expanded { - this.expanded_tool_calls.remove(&id); - } else { - this.expanded_tool_calls.insert(id.clone()); - } - cx.notify(); - } - })), - ); +; let terminal_view = self .entry_view_state @@ -4767,12 +5091,14 @@ impl AcpThreadView { .and_then(|entry| entry.terminal(terminal)); v_flex() - .my_1p5() - .mx_5() - .border_1() - .when(tool_failed || command_failed, |card| card.border_dashed()) - .border_color(border_color) - .rounded_md() + .when(!is_subagent, |this| { + this.my_1p5() + 
.mx_5() + .border_1() + .when(tool_failed || command_failed, |card| card.border_dashed()) + .border_color(border_color) + .rounded_md() + }) .overflow_hidden() .child( v_flex() @@ -4849,6 +5175,7 @@ impl AcpThreadView { entry_ix: usize, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: &Context, ) -> Div { @@ -4856,15 +5183,20 @@ impl AcpThreadView { div().w_full().map(|this| { if tool_call.is_subagent() { - this.child(self.render_subagent_tool_call( - active_session_id, - entry_ix, - tool_call, - tool_call.subagent_session_id.clone(), - focus_handle, - window, - cx, - )) + this.child( + self.render_subagent_tool_call( + active_session_id, + entry_ix, + tool_call, + tool_call + .subagent_session_info + .as_ref() + .map(|i| i.session_id.clone()), + focus_handle, + window, + cx, + ), + ) } else if has_terminals { this.children(tool_call.terminals().map(|terminal| { self.render_terminal_tool_call( @@ -4873,6 +5205,7 @@ impl AcpThreadView { terminal, tool_call, focus_handle, + is_subagent, window, cx, ) @@ -4883,6 +5216,7 @@ impl AcpThreadView { entry_ix, tool_call, focus_handle, + is_subagent, window, cx, )) @@ -4896,6 +5230,7 @@ impl AcpThreadView { entry_ix: usize, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: &Context, ) -> Div { @@ -4917,14 +5252,20 @@ impl AcpThreadView { matches!(tool_call.kind, acp::ToolKind::Edit) || tool_call.diffs().next().is_some(); let is_cancelled_edit = is_edit && matches!(tool_call.status, ToolCallStatus::Canceled); - let has_revealed_diff = tool_call.diffs().next().is_some_and(|diff| { - self.entry_view_state - .read(cx) - .entry(entry_ix) - .and_then(|entry| entry.editor_for_diff(diff)) - .is_some() - && diff.read(cx).has_revealed_range(cx) - }); + let (has_revealed_diff, tool_call_output_focus) = tool_call + .diffs() + .next() + .and_then(|diff| { + let editor = self + .entry_view_state + .read(cx) + .entry(entry_ix) + .and_then(|entry| 
entry.editor_for_diff(diff))?; + let has_revealed_diff = diff.read(cx).has_revealed_range(cx); + let has_focus = editor.read(cx).is_focused(window); + Some((has_revealed_diff, has_focus)) + }) + .unwrap_or((false, false)); let use_card_layout = needs_confirmation || is_edit || is_terminal_tool; @@ -5110,7 +5451,9 @@ impl AcpThreadView { v_flex() .map(|this| { - if use_card_layout { + if is_subagent { + this + } else if use_card_layout { this.my_1p5() .rounded_md() .border_1() @@ -5122,18 +5465,25 @@ impl AcpThreadView { this.my_1() } }) - .map(|this| { - if has_location && !use_card_layout { - this.ml_4() - } else { - this.ml_5() - } + .when(!is_subagent, |this| { + this.map(|this| { + if has_location && !use_card_layout { + this.ml_4() + } else { + this.ml_5() + } + }) + .mr_5() }) - .mr_5() .map(|this| { if is_terminal_tool { let label_source = tool_call.label.read(cx).source(); - this.child(self.render_collapsible_command(true, label_source, &tool_call.id, cx)) + this.child(self.render_collapsible_command( + card_header_id.clone(), + true, + label_source, + cx, + )) } else { this.child( h_flex() @@ -5157,97 +5507,148 @@ impl AcpThreadView { window, cx, )) - .when(is_collapsible || failed_or_canceled, |this| { - let diff_for_discard = - if has_revealed_diff && is_cancelled_edit && cx.has_flag::() { - tool_call.diffs().next().cloned() - } else { - None - }; - this.child( - h_flex() - .px_1() - .when_some(diff_for_discard.clone(), |this, _| this.pr_0p5()) - .gap_1() - .when(is_collapsible, |this| { - this.child( - Disclosure::new(("expand-output", entry_ix), is_open) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .visible_on_hover(&card_header_id) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move |this: &mut Self, _, _, cx: &mut Context| { + .child( + h_flex() + .gap_0p5() + .when(is_collapsible || failed_or_canceled, |this| { + let diff_for_discard = if has_revealed_diff + && is_cancelled_edit + && cx.has_flag::() + { + 
tool_call.diffs().next().cloned() + } else { + None + }; + + this.child( + h_flex() + .px_1() + .when_some(diff_for_discard.clone(), |this, _| { + this.pr_0p5() + }) + .gap_1() + .when(is_collapsible, |this| { + this.child( + Disclosure::new( + ("expand-output", entry_ix), + is_open, + ) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .visible_on_hover(&card_header_id) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this: &mut Self, + _, + _, + cx: &mut Context| { if is_open { - this - .expanded_tool_calls.remove(&id); + this.expanded_tool_calls + .remove(&id); } else { - this.expanded_tool_calls.insert(id.clone()); + this.expanded_tool_calls + .insert(id.clone()); } - cx.notify(); + cx.notify(); + } + })), + ) + }) + .when(failed_or_canceled, |this| { + if is_cancelled_edit && !has_revealed_diff { + this.child( + div() + .id(entry_ix) + .tooltip(Tooltip::text( + "Interrupted Edit", + )) + .child( + Icon::new(IconName::XCircle) + .color(Color::Muted) + .size(IconSize::Small), + ), + ) + } else if is_cancelled_edit { + this + } else { + this.child( + Icon::new(IconName::Close) + .color(Color::Error) + .size(IconSize::Small), + ) } - })), + }) + .when_some(diff_for_discard, |this, diff| { + let tool_call_id = tool_call.id.clone(); + let is_discarded = self + .discarded_partial_edits + .contains(&tool_call_id); + + this.when(!is_discarded, |this| { + this.child( + IconButton::new( + ("discard-partial-edit", entry_ix), + IconName::Undo, + ) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Discard Interrupted Edit", + None, + "You can discard this interrupted partial edit and restore the original file content.", + cx, + ) + }) + .on_click(cx.listener({ + let tool_call_id = + tool_call_id.clone(); + move |this, _, _window, cx| { + let diff_data = diff.read(cx); + let base_text = diff_data + .base_text() + .clone(); + let buffer = + diff_data.buffer().clone(); + buffer.update( + cx, + |buffer, 
cx| { + buffer.set_text( + base_text.as_ref(), + cx, + ); + }, + ); + this.discarded_partial_edits + .insert( + tool_call_id.clone(), + ); + cx.notify(); + } + })), + ) + }) + }), ) - }) - .when(failed_or_canceled, |this| { - if is_cancelled_edit && !has_revealed_diff { - this.child( - div() - .id(entry_ix) - .tooltip(Tooltip::text( - "Interrupted Edit", - )) - .child( - Icon::new(IconName::XCircle) - .color(Color::Muted) - .size(IconSize::Small), - ), - ) - } else if is_cancelled_edit { - this - } else { - this.child( - Icon::new(IconName::Close) - .color(Color::Error) - .size(IconSize::Small), + }) + .when(tool_call_output_focus, |this| { + this.child( + Button::new("open-file-button", "Open File") + .label_size(LabelSize::Small) + .style(ButtonStyle::OutlinedGhost) + .key_binding( + KeyBinding::for_action(&OpenExcerpts, cx) + .map(|s| s.size(rems_from_px(12.))), ) - } - }) - .when_some(diff_for_discard, |this, diff| { - let tool_call_id = tool_call.id.clone(); - let is_discarded = self.discarded_partial_edits.contains(&tool_call_id); - this.when(!is_discarded, |this| { - this.child( - IconButton::new( - ("discard-partial-edit", entry_ix), - IconName::Undo, + .on_click(|_, window, cx| { + window.dispatch_action( + Box::new(OpenExcerpts), + cx, ) - .icon_size(IconSize::Small) - .tooltip(move |_, cx| Tooltip::with_meta( - "Discard Interrupted Edit", - None, - "You can discard this interrupted partial edit and restore the original file content.", - cx - )) - .on_click(cx.listener({ - let tool_call_id = tool_call_id.clone(); - move |this, _, _window, cx| { - let diff_data = diff.read(cx); - let base_text = diff_data.base_text().clone(); - let buffer = diff_data.buffer().clone(); - buffer.update(cx, |buffer, cx| { - buffer.set_text(base_text.as_ref(), cx); - }); - this.discarded_partial_edits.insert(tool_call_id.clone()); - cx.notify(); - } - })), - ) - }) - }) + }), + ) + }), + ) - ) - }), ) } }) @@ -5298,9 +5699,9 @@ impl AcpThreadView { ) -> Div { // Get the 
selected granularity index, defaulting to the last option ("Only this time") let selected_index = self - .selected_permission_granularity - .get(&tool_call_id) - .copied() + .conversation + .read(cx) + .selected_permission_granularity(&session_id, &tool_call_id) .unwrap_or_else(|| choices.len().saturating_sub(1)); let selected_choice = choices.get(selected_index).or(choices.last()); @@ -5388,6 +5789,7 @@ impl AcpThreadView { ) }) .on_click(cx.listener({ + let session_id = session_id.clone(); let tool_call_id = tool_call_id.clone(); let option_id = deny_option_id; let option_kind = deny_option_kind; @@ -5408,6 +5810,7 @@ impl AcpThreadView { choices, dropdown_label, entry_ix, + session_id, tool_call_id, selected_index, is_first, @@ -5420,6 +5823,7 @@ impl AcpThreadView { choices: &[PermissionOptionChoice], current_label: SharedString, entry_ix: usize, + session_id: acp::SessionId, tool_call_id: acp::ToolCallId, selected_index: usize, is_first: bool, @@ -5433,6 +5837,8 @@ impl AcpThreadView { let permission_dropdown_handle = self.permission_dropdown_handle.clone(); + let conversation = self.conversation.clone(); + PopoverMenu::new(("permission-granularity", entry_ix)) .with_handle(permission_dropdown_handle) .trigger( @@ -5453,6 +5859,8 @@ impl AcpThreadView { }), ) .menu(move |window, cx| { + let session_id = session_id.clone(); + let conversation = conversation.clone(); let tool_call_id = tool_call_id.clone(); let options = menu_options.clone(); @@ -5460,23 +5868,23 @@ impl AcpThreadView { for (index, display_name) in options.iter() { let display_name = display_name.clone(); let index = *index; - let tool_call_id_for_entry = tool_call_id.clone(); + let session_id = session_id.clone(); + let conversation = conversation.clone(); + let tool_call_id = tool_call_id.clone(); let is_selected = index == selected_index; - menu = menu.toggleable_entry( display_name, is_selected, IconPosition::End, None, - move |window, cx| { - window.dispatch_action( - 
SelectPermissionGranularity { - tool_call_id: tool_call_id_for_entry.0.to_string(), + move |_window, cx| { + conversation.update(cx, |conversation, _cx| { + conversation.set_selected_permission_granularity( + session_id.clone(), + tool_call_id.clone(), index, - } - .boxed_clone(), - cx, - ); + ); + }); }, ); } @@ -5867,6 +6275,7 @@ impl AcpThreadView { terminal, tool_call, focus_handle, + false, window, cx, ), @@ -6133,7 +6542,7 @@ impl AcpThreadView { &self, active_session_id: &acp::SessionId, entry_ix: usize, - thread_view: Option<&Entity>, + thread_view: Option<&Entity>, tool_call: &ToolCall, focus_handle: &FocusHandle, window: &Window, @@ -6150,47 +6559,90 @@ impl AcpThreadView { .map(|log| log.read(cx).changed_buffers(cx)) .unwrap_or_default(); + let is_pending_tool_call = thread + .as_ref() + .and_then(|thread| { + self.conversation + .read(cx) + .pending_tool_call(thread.read(cx).session_id(), cx) + }) + .is_some(); + let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); let files_changed = changed_buffers.len(); let diff_stats = DiffStats::all_files(&changed_buffers, cx); let is_running = matches!( tool_call.status, - ToolCallStatus::Pending | ToolCallStatus::InProgress + ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::WaitingForConfirmation { .. 
} ); - let is_canceled_or_failed = matches!( + + let is_failed = matches!( tool_call.status, - ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected + ToolCallStatus::Failed | ToolCallStatus::Rejected ); - let has_title = thread - .as_ref() - .is_some_and(|t| !t.read(cx).title().is_empty()); - let has_no_title_or_canceled = !has_title || is_canceled_or_failed; + let is_cancelled = matches!(tool_call.status, ToolCallStatus::Canceled) + || tool_call.content.iter().any(|c| match c { + ToolCallContent::ContentBlock(ContentBlock::Markdown { markdown }) => { + markdown.read(cx).source() == "User canceled" + } + _ => false, + }); - let title = thread + let thread_title = thread .as_ref() .map(|t| t.read(cx).title()) - .unwrap_or_else(|| { - if is_canceled_or_failed { - "Subagent Canceled" - } else { - "Spawning Subagent…" - } - .into() - }); + .filter(|t| !t.is_empty()); + let tool_call_label = tool_call.label.read(cx).source().to_string(); + let has_tool_call_label = !tool_call_label.is_empty(); + + let has_title = thread_title.is_some() || has_tool_call_label; + let has_no_title_or_canceled = !has_title || is_failed || is_cancelled; + + let title: SharedString = if let Some(thread_title) = thread_title { + thread_title + } else if !tool_call_label.is_empty() { + tool_call_label.into() + } else if is_cancelled { + "Subagent Canceled".into() + } else if is_failed { + "Subagent Failed".into() + } else { + "Spawning Agent…".into() + }; let card_header_id = format!("subagent-header-{}", entry_ix); + let status_icon = format!("status-icon-{}", entry_ix); let diff_stat_id = format!("subagent-diff-{}", entry_ix); let icon = h_flex().w_4().justify_center().child(if is_running { SpinnerLabel::new() .size(LabelSize::Small) .into_any_element() - } else if is_canceled_or_failed { - Icon::new(IconName::Close) - .size(IconSize::Small) - .color(Color::Error) + } else if is_cancelled { + div() + .id(status_icon) + .child( + Icon::new(IconName::Circle) + 
.size(IconSize::Small) + .color(Color::Custom( + cx.theme().colors().icon_disabled.opacity(0.5), + )), + ) + .tooltip(Tooltip::text("Subagent Cancelled")) + .into_any_element() + } else if is_failed { + div() + .id(status_icon) + .child( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ) + .tooltip(Tooltip::text("Subagent Failed")) .into_any_element() } else { Icon::new(IconName::Check) @@ -6209,6 +6661,8 @@ impl AcpThreadView { "Click to Preview" }; + let error_message = self.subagent_error_message(&tool_call.status, tool_call, cx); + v_flex() .w_full() .rounded_md() @@ -6269,7 +6723,7 @@ impl AcpThreadView { ) }), ) - .when(!has_no_title_or_canceled, |this| { + .when(!has_no_title_or_canceled && !is_pending_tool_call, |this| { this.tooltip(move |_, cx| { Tooltip::with_meta( title.to_string(), @@ -6279,7 +6733,7 @@ impl AcpThreadView { ) }) }) - .when(has_expandable_content, |this| { + .when(has_expandable_content && !is_pending_tool_call, |this| { this.cursor_pointer() .hover(|s| s.bg(cx.theme().colors().element_hover)) .child( @@ -6302,6 +6756,9 @@ impl AcpThreadView { this.expanded_tool_calls .insert(tool_call_id.clone()); } + let expanded = + this.expanded_tool_calls.contains(&tool_call_id); + telemetry::event!("Subagent Toggled", expanded); cx.notify(); } })) @@ -6320,6 +6777,7 @@ impl AcpThreadView { |this, thread| { this.on_click(cx.listener( move |_this, _event, _window, cx| { + telemetry::event!("Subagent Stopped"); thread.update(cx, |thread, cx| { thread.cancel(cx).detach(); }); @@ -6337,68 +6795,69 @@ impl AcpThreadView { .read(cx) .pending_tool_call(thread.read(cx).session_id(), cx); - if let Some((_, subagent_tool_call_id, _)) = pending_tool_call { + let session_id = thread.read(cx).session_id().clone(); + + let fullscreen_toggle = h_flex() + .id(entry_ix) + .py_1() + .w_full() + .justify_center() + .border_t_1() + .when(is_failed, |this| this.border_dashed()) + .border_color(self.tool_card_border_color(cx)) + 
.cursor_pointer() + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .child( + Icon::new(IconName::Maximize) + .color(Color::Muted) + .size(IconSize::Small), + ) + .tooltip(Tooltip::text("Make Subagent Full Screen")) + .on_click(cx.listener(move |this, _event, window, cx| { + telemetry::event!("Subagent Maximized"); + this.server_view + .update(cx, |this, cx| { + this.navigate_to_session(session_id.clone(), window, cx); + }) + .ok(); + })); + + if is_running && let Some((_, subagent_tool_call_id, _)) = pending_tool_call { if let Some((entry_ix, tool_call)) = thread.read(cx).tool_call(&subagent_tool_call_id) { - this.child(thread_view.read(cx).render_any_tool_call( - active_session_id, - entry_ix, - tool_call, - focus_handle, - window, - cx, - )) + this.child(Divider::horizontal().color(DividerColor::Border)) + .child(thread_view.read(cx).render_any_tool_call( + active_session_id, + entry_ix, + tool_call, + focus_handle, + true, + window, + cx, + )) + .child(fullscreen_toggle) } else { this } } else { - let session_id = thread.read(cx).session_id().clone(); this.when(is_expanded, |this| { this.child(self.render_subagent_expanded_content( - active_session_id, - entry_ix, thread_view, is_running, tool_call, - focus_handle, window, cx, )) - .child( - h_flex() - .p_1() - .w_full() - .border_t_1() - .when(is_canceled_or_failed, |this| this.border_dashed()) - .border_color(cx.theme().colors().border_variant) - .child( - Button::new( - format!("expand-subagent-{}", entry_ix), - "Full Screen", - ) - .full_width() - .style(ButtonStyle::Outlined) - .label_size(LabelSize::Small) - .icon(IconName::Maximize) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .on_click(cx.listener( - move |this, _event, window, cx| { - this.server_view - .update(cx, |this, cx| { - this.navigate_to_session( - session_id.clone(), - window, - cx, - ); - }) - .ok(); - }, - )), - ), - ) + .when_some(error_message, |this, message| { + this.child( + 
Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title(message), + ) + }) + .child(fullscreen_toggle) }) } }) @@ -6407,12 +6866,9 @@ impl AcpThreadView { fn render_subagent_expanded_content( &self, - active_session_id: &acp::SessionId, - entry_ix: usize, - thread_view: &Entity, + thread_view: &Entity, is_running: bool, tool_call: &ToolCall, - focus_handle: &FocusHandle, window: &Window, cx: &Context, ) -> impl IntoElement { @@ -6421,99 +6877,106 @@ impl AcpThreadView { let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); - let base_container = || { + let is_canceled_or_failed = matches!( + tool_call.status, + ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected + ); + + let editor_bg = cx.theme().colors().editor_background; + let overlay = { div() - .id(format!("subagent-content-{}", session_id)) - .relative() - .w_full() - .h_56() - .border_t_1() - .border_color(self.tool_card_border_color(cx)) - .overflow_hidden() + .absolute() + .inset_0() + .size_full() + .bg(linear_gradient( + 180., + linear_color_stop(editor_bg.opacity(0.5), 0.), + linear_color_stop(editor_bg.opacity(0.), 0.1), + )) + .block_mouse_except_scroll() }; - let show_thread_entries = is_running || tool_call.content.is_empty(); - - if show_thread_entries { - let scroll_handle = self - .subagent_scroll_handles - .borrow_mut() - .entry(session_id.clone()) - .or_default() - .clone(); - if is_running { - scroll_handle.scroll_to_bottom(); - } + let entries = subagent_view.thread.read(cx).entries(); + let total_entries = entries.len(); + let mut entry_range = if let Some(info) = tool_call.subagent_session_info.as_ref() { + info.message_start_index + ..info + .message_end_index + .map(|i| (i + 1).min(total_entries)) + .unwrap_or(total_entries) + } else { + 0..total_entries + }; + entry_range.start = entry_range + .end + .saturating_sub(MAX_PREVIEW_ENTRIES) + .max(entry_range.start); + let start_ix = 
entry_range.start; - let entries = subagent_view.thread.read(cx).entries(); - let total_entries = entries.len(); - let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); + let scroll_handle = self + .subagent_scroll_handles + .borrow_mut() + .entry(session_id.clone()) + .or_default() + .clone(); + if is_running { + scroll_handle.scroll_to_bottom(); + } - let rendered_entries: Vec = entries[start_ix..] - .iter() - .enumerate() - .map(|(i, entry)| { - let actual_ix = start_ix + i; - subagent_view.render_entry(actual_ix, total_entries + 1, entry, window, cx) - }) - .collect(); + let rendered_entries: Vec = entries + .get(entry_range) + .unwrap_or_default() + .iter() + .enumerate() + .map(|(i, entry)| { + let actual_ix = start_ix + i; + subagent_view.render_entry(actual_ix, total_entries, entry, window, cx) + }) + .collect(); - base_container() - .child( - div() - .id(format!("subagent-entries-{}", session_id)) - .size_full() - .track_scroll(&scroll_handle) - .pb_1() - .children(rendered_entries), - ) - .when(is_running, |this| { - let editor_bg = cx.theme().colors().editor_background; - this.child( - div() - .absolute() - .inset_0() - .size_full() - .bg(linear_gradient( - 180., - linear_color_stop(editor_bg, 0.), - linear_color_stop(editor_bg.opacity(0.), 0.15), - )) - .block_mouse_except_scroll(), - ) - }) - .into_any_element() + v_flex() + .w_full() + .border_t_1() + .when(is_canceled_or_failed, |this| this.border_dashed()) + .border_color(self.tool_card_border_color(cx)) + .overflow_hidden() + .child( + div() + .pb_1() + .min_h_0() + .id(format!("subagent-entries-{}", session_id)) + .track_scroll(&scroll_handle) + .children(rendered_entries), + ) + .h_56() + .child(overlay) + .into_any_element() + } + + fn subagent_error_message( + &self, + status: &ToolCallStatus, + tool_call: &ToolCall, + cx: &App, + ) -> Option { + if matches!(status, ToolCallStatus::Failed) { + tool_call.content.iter().find_map(|content| { + if let 
ToolCallContent::ContentBlock(block) = content { + if let acp_thread::ContentBlock::Markdown { markdown } = block { + let source = markdown.read(cx).source().to_string(); + if !source.is_empty() { + if source == "User canceled" { + return None; + } else { + return Some(SharedString::from(source)); + } + } + } + } + None + }) } else { - base_container() - .child( - v_flex() - .id(format!("subagent-done-content-{}", session_id)) - .size_full() - .justify_end() - .children(tool_call.content.iter().enumerate().map( - |(content_ix, content)| { - div().p_2().child(self.render_tool_call_content( - active_session_id, - entry_ix, - content, - content_ix, - tool_call, - true, - false, - matches!( - tool_call.status, - ToolCallStatus::Failed - | ToolCallStatus::Rejected - | ToolCallStatus::Canceled - ), - focus_handle, - window, - cx, - )) - }, - )), - ) - .into_any_element() + None } } @@ -6742,7 +7205,7 @@ impl AcpThreadView { } let connection = this.thread.read(cx).connection().clone(); window.defer(cx, |window, cx| { - AcpServerView::handle_auth_required( + ConnectionView::handle_auth_required( server_view, AuthRequired::new(), agent_name, @@ -6861,7 +7324,7 @@ impl AcpThreadView { fn update_recent_history_from_cache( &mut self, - history: &Entity, + history: &Entity, cx: &mut Context, ) { self.recent_history_entries = history.read(cx).get_recent_sessions(3); @@ -6934,7 +7397,7 @@ impl AcpThreadView { // TODO: Add keyboard navigation. 
let is_hovered = self.hovered_recent_history_item == Some(index); - crate::acp::thread_history::AcpHistoryEntryElement::new( + crate::thread_history::HistoryEntryElement::new( entry, self.server_view.clone(), ) @@ -7097,6 +7560,24 @@ impl AcpThreadView { }); } + fn toggle_fast_mode(&mut self, cx: &mut Context) { + if !self.fast_mode_available(cx) { + return; + } + let Some(thread) = self.as_native_thread(cx) else { + return; + }; + thread.update(cx, |thread, cx| { + thread.set_speed( + thread + .speed() + .map(|speed| speed.toggle()) + .unwrap_or(Speed::Fast), + cx, + ); + }); + } + fn cycle_thinking_effort(&mut self, cx: &mut Context) { let Some(thread) = self.as_native_thread(cx) else { return; @@ -7156,7 +7637,7 @@ impl AcpThreadView { } } -impl Render for AcpThreadView { +impl Render for ThreadView { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let has_messages = self.list_state.item_count() > 0; @@ -7198,9 +7679,11 @@ impl Render for AcpThreadView { .on_action(cx.listener(Self::allow_once)) .on_action(cx.listener(Self::reject_once)) .on_action(cx.listener(Self::handle_authorize_tool_call)) - .on_action(cx.listener(Self::handle_select_permission_granularity)) .on_action(cx.listener(Self::open_permission_dropdown)) .on_action(cx.listener(Self::open_add_context_menu)) + .on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| { + this.toggle_fast_mode(cx); + })) .on_action(cx.listener(|this, _: &ToggleThinkingMode, _window, cx| { if let Some(thread) = this.as_native_thread(cx) { thread.update(cx, |thread, cx| { @@ -7220,9 +7703,7 @@ impl Render for AcpThreadView { cx.notify(); })) .on_action(cx.listener(|this, _: &EditFirstQueuedMessage, window, cx| { - if let Some(editor) = this.queued_message_editors.first() { - window.focus(&editor.focus_handle(cx), cx); - } + this.move_queued_message_to_main_editor(0, None, window, cx); })) .on_action(cx.listener(|this, _: &ClearMessageQueue, _, cx| { 
this.local_queued_messages.clear(); diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs similarity index 81% rename from crates/agent_ui/src/acp/entry_view_state.rs rename to crates/agent_ui/src/entry_view_state.rs index 353e1168c8a685bd1822ebe83e7ea2d52733a728..aef7f1f335eff7d092f924b9883ab0d64bbf65a8 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -1,11 +1,11 @@ use std::{cell::RefCell, ops::Range, rc::Rc}; -use super::thread_history::AcpThreadHistory; +use super::thread_history::ThreadHistory; use acp_thread::{AcpThread, AgentThreadEntry}; use agent::ThreadStore; use agent_client_protocol::{self as acp, ToolCallId}; use collections::HashMap; -use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior}; +use editor::{Editor, EditorEvent, EditorMode, MinimapVisibility, SizingBehavior}; use gpui::{ AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window, @@ -13,19 +13,20 @@ use gpui::{ use language::language_settings::SoftWrap; use project::Project; use prompt_store::PromptStore; +use rope::Point; use settings::Settings as _; use terminal_view::TerminalView; use theme::ThemeSettings; use ui::{Context, TextSize}; use workspace::Workspace; -use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; +use crate::message_editor::{MessageEditor, MessageEditorEvent}; pub struct EntryViewState { workspace: WeakEntity, project: WeakEntity, thread_store: Option>, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, entries: Vec, prompt_capabilities: Rc>, @@ -38,7 +39,7 @@ impl EntryViewState { workspace: WeakEntity, project: WeakEntity, thread_store: Option>, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, prompt_capabilities: Rc>, available_commands: Rc>>, @@ -113,7 +114,7 @@ impl EntryViewState { 
cx.subscribe(&message_editor, move |_, editor, event, cx| { cx.emit(EntryViewEvent { entry_index: index, - view_event: ViewEvent::MessageEditorEvent(editor, *event), + view_event: ViewEvent::MessageEditorEvent(editor, event.clone()), }) }) .detach(); @@ -125,14 +126,19 @@ impl EntryViewState { let terminals = tool_call.terminals().cloned().collect::>(); let diffs = tool_call.diffs().cloned().collect::>(); - let views = if let Some(Entry::Content(views)) = self.entries.get_mut(index) { - views + let views = if let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) { + &mut tool_call.content } else { - self.set_entry(index, Entry::empty()); - let Some(Entry::Content(views)) = self.entries.get_mut(index) else { + self.set_entry( + index, + Entry::ToolCall(ToolCallEntry { + content: HashMap::default(), + }), + ); + let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) else { unreachable!() }; - views + &mut tool_call.content }; let is_tool_call_completed = @@ -168,12 +174,48 @@ impl EntryViewState { for diff in diffs { views.entry(diff.entity_id()).or_insert_with(|| { - let element = create_editor_diff(diff.clone(), window, cx).into_any(); + let editor = create_editor_diff(diff.clone(), window, cx); + cx.subscribe(&editor, { + let diff = diff.clone(); + let entry_index = index; + move |_this, _editor, event: &EditorEvent, cx| { + if let EditorEvent::OpenExcerptsRequested { + selections_by_buffer, + split, + } = event + { + let multibuffer = diff.read(cx).multibuffer(); + if let Some((buffer_id, (ranges, _))) = + selections_by_buffer.iter().next() + { + if let Some(buffer) = + multibuffer.read(cx).buffer(*buffer_id) + { + if let Some(range) = ranges.first() { + let point = + buffer.read(cx).offset_to_point(range.start.0); + if let Some(path) = diff.read(cx).file_path(cx) { + cx.emit(EntryViewEvent { + entry_index, + view_event: ViewEvent::OpenDiffLocation { + path, + position: point, + split: *split, + }, + }); + } + } + } + } + } + } + }) + 
.detach(); cx.emit(EntryViewEvent { entry_index: index, view_event: ViewEvent::NewDiff(id.clone()), }); - element + editor.into_any() }); } } @@ -213,8 +255,8 @@ impl EntryViewState { for entry in self.entries.iter() { match entry { Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {} - Entry::Content(response_views) => { - for view in response_views.values() { + Entry::ToolCall(ToolCallEntry { content }) => { + for view in content.values() { if let Ok(diff_editor) = view.clone().downcast::() { diff_editor.update(cx, |diff_editor, cx| { diff_editor.set_text_style_refinement( @@ -242,6 +284,11 @@ pub enum ViewEvent { NewTerminal(ToolCallId), TerminalMovedToBackground(ToolCallId), MessageEditorEvent(Entity, MessageEditorEvent), + OpenDiffLocation { + path: String, + position: Point, + split: bool, + }, } #[derive(Default, Debug)] @@ -263,25 +310,30 @@ impl AssistantMessageEntry { } } +#[derive(Debug)] +pub struct ToolCallEntry { + content: HashMap, +} + #[derive(Debug)] pub enum Entry { UserMessage(Entity), AssistantMessage(AssistantMessageEntry), - Content(HashMap), + ToolCall(ToolCallEntry), } impl Entry { pub fn focus_handle(&self, cx: &App) -> Option { match self { Self::UserMessage(editor) => Some(editor.read(cx).focus_handle(cx)), - Self::AssistantMessage(_) | Self::Content(_) => None, + Self::AssistantMessage(_) | Self::ToolCall(_) => None, } } pub fn message_editor(&self) -> Option<&Entity> { match self { Self::UserMessage(editor) => Some(editor), - Self::AssistantMessage(_) | Self::Content(_) => None, + Self::AssistantMessage(_) | Self::ToolCall(_) => None, } } @@ -308,25 +360,21 @@ impl Entry { ) -> Option { match self { Self::AssistantMessage(message) => message.scroll_handle_for_chunk(chunk_ix), - Self::UserMessage(_) | Self::Content(_) => None, + Self::UserMessage(_) | Self::ToolCall(_) => None, } } fn content_map(&self) -> Option<&HashMap> { match self { - Self::Content(map) => Some(map), + Self::ToolCall(ToolCallEntry { content }) => 
Some(content), _ => None, } } - fn empty() -> Self { - Self::Content(HashMap::default()) - } - #[cfg(test)] pub fn has_content(&self) -> bool { match self { - Self::Content(map) => !map.is_empty(), + Self::ToolCall(ToolCallEntry { content }) => !content.is_empty(), Self::UserMessage(_) | Self::AssistantMessage(_) => false, } } @@ -379,6 +427,7 @@ fn create_editor_diff( editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_show_indent_guides(false, cx); editor.set_read_only(true); + editor.set_delegate_open_excerpts(true); editor.set_show_breakpoints(false, cx); editor.set_show_code_actions(false, cx); editor.set_show_git_diff_gutter(false, cx); @@ -412,7 +461,7 @@ mod tests { use fs::FakeFs; use gpui::{AppContext as _, TestAppContext}; - use crate::acp::entry_view_state::EntryViewState; + use crate::entry_view_state::EntryViewState; use multi_buffer::MultiBufferRow; use pretty_assertions::assert_matches; use project::Project; @@ -459,8 +508,8 @@ mod tests { }); let thread_store = None; - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let view_state = cx.new(|_cx| { EntryViewState::new( diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index f2221675764db228199e8f809d2b6ddf20b46d9e..9ac84addcc80c806739570ad9951209f16c31bb1 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -7,7 +7,7 @@ use std::rc::Rc; use std::sync::Arc; use uuid::Uuid; -use crate::acp::AcpThreadHistory; +use crate::ThreadHistory; use crate::context::load_context; use crate::mention_set::MentionSet; use crate::{ @@ -26,8 +26,8 @@ use editor::RowExt; use editor::SelectionEffects; use editor::scroll::ScrollOffset; use editor::{ - Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange, - HighlightKey, 
MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey, + MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint, actions::SelectAll, display_map::{ BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins, @@ -487,7 +487,7 @@ impl InlineAssistant { project: WeakEntity, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, initial_prompt: Option, window: &mut Window, codegen_ranges: &[Range], @@ -626,7 +626,7 @@ impl InlineAssistant { project: WeakEntity, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, initial_prompt: Option, window: &mut Window, cx: &mut App, @@ -671,7 +671,7 @@ impl InlineAssistant { workspace: Entity, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, window: &mut Window, cx: &mut App, ) -> InlineAssistId { @@ -1495,11 +1495,11 @@ impl InlineAssistant { let mut new_blocks = Vec::new(); for (new_row, old_row_range) in deleted_row_ranges { - let (_, buffer_start) = old_snapshot - .point_to_buffer_offset(Point::new(*old_row_range.start(), 0)) + let (_, start, _) = old_snapshot + .point_to_buffer_point(Point::new(*old_row_range.start(), 0)) .unwrap(); - let (_, buffer_end) = old_snapshot - .point_to_buffer_offset(Point::new( + let (_, end, _) = old_snapshot + .point_to_buffer_point(Point::new( *old_row_range.end(), old_snapshot.line_len(MultiBufferRow(*old_row_range.end())), )) @@ -1509,10 +1509,11 @@ impl InlineAssistant { let multi_buffer = cx.new(|_| MultiBuffer::without_headers(language::Capability::ReadOnly)); multi_buffer.update(cx, |multi_buffer, cx| { - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_buffer( old_buffer.clone(), - // todo(lw): buffer_start and buffer_end might come from different snapshots! 
- Some(ExcerptRange::new(buffer_start..buffer_end)), + // todo(lw): start and end might come from different snapshots! + [start..end], + 0, cx, ); }); @@ -2154,7 +2155,7 @@ pub mod test { }); let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx)); + let history = cx.new(|cx| crate::ThreadHistory::new(None, window, cx)); // Add editor to workspace workspace.update(cx, |workspace, cx| { diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index 2066a7ad886614373b200f4e45dd3bb0034f72a2..0450efc4b7ebf466d0b9b13f516249a2cba0ecfa 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -1,4 +1,4 @@ -use crate::acp::AcpThreadHistory; +use crate::ThreadHistory; use agent::ThreadStore; use agent_settings::AgentSettings; use collections::{HashMap, VecDeque}; @@ -64,7 +64,7 @@ pub struct PromptEditor { pub editor: Entity, mode: PromptEditorMode, mention_set: Entity, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, workspace: WeakEntity, model_selector: Entity, @@ -1225,7 +1225,7 @@ impl PromptEditor { fs: Arc, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, project: WeakEntity, workspace: WeakEntity, window: &mut Window, @@ -1384,7 +1384,7 @@ impl PromptEditor { fs: Arc, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, project: WeakEntity, workspace: WeakEntity, window: &mut Window, @@ -1632,6 +1632,7 @@ fn insert_message_creases( crease_for_mention( crease.label.clone(), crease.icon_path.clone(), + None, start..end, cx.weak_entity(), ) diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index 9205e21be1ab796fae50a26d31aca514756e2bc2..e6e72b3197b4108d7b423470bf8bb4b75cd055b7 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ 
b/crates/agent_ui/src/language_model_selector.rs @@ -455,12 +455,7 @@ impl PickerDelegate for LanguageModelPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(LanguageModelPickerEntry::Model(_)) => true, Some(LanguageModelPickerEntry::Separator(_)) | None => false, diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index b1dab681ade325d0d47fa9f9310cb3e98bf72974..792bfc11a63471e02b22835823fa8c59cdfc9bcf 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -233,6 +233,9 @@ impl MentionSet { content_len, mention_uri.name().into(), IconName::Image.path().into(), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(workspace.downgrade()), Some(image), editor.clone(), window, @@ -245,6 +248,9 @@ impl MentionSet { content_len, crease_text, mention_uri.icon_path(cx), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(workspace.downgrade()), None, editor.clone(), window, @@ -485,6 +491,7 @@ impl MentionSet { let crease = crease_for_mention( selection_name(abs_path.as_deref(), &line_range).into(), uri.icon_path(cx), + uri.tooltip_text(), range, editor.downgrade(), ); @@ -547,9 +554,9 @@ impl MentionSet { None, None, ); - let connection = server.connect(None, delegate, cx); + let connection = server.connect(delegate, cx); cx.spawn(async move |_, cx| { - let (agent, _) = connection.await?; + let agent = connection.await?; let agent = agent.downcast::().unwrap(); let summary = agent .0 @@ -695,6 +702,9 @@ pub(crate) async fn insert_images_as_context( content_len, MentionUri::PastedImage.name().into(), IconName::Image.path().into(), + None, + None, + None, Some(Task::ready(Ok(image.clone())).shared()), editor.clone(), window, @@ -805,7 +815,9 @@ pub(crate) fn 
insert_crease_for_mention( content_len: usize, crease_label: SharedString, crease_icon: SharedString, - // abs_path: Option>, + crease_tooltip: Option, + mention_uri: Option, + workspace: Option>, image: Option, String>>>>, editor: Entity, window: &mut Window, @@ -825,6 +837,9 @@ pub(crate) fn insert_crease_for_mention( render: render_mention_fold_button( crease_label.clone(), crease_icon.clone(), + crease_tooltip, + mention_uri.clone(), + workspace.clone(), start..end, rx, image, @@ -858,11 +873,12 @@ pub(crate) fn insert_crease_for_mention( pub(crate) fn crease_for_mention( label: SharedString, icon_path: SharedString, + tooltip: Option, range: Range, editor_entity: WeakEntity, ) -> Crease { let placeholder = FoldPlaceholder { - render: render_fold_icon_button(icon_path.clone(), label.clone(), editor_entity), + render: render_fold_icon_button(icon_path.clone(), label.clone(), tooltip, editor_entity), merge_adjacent: false, ..Default::default() }; @@ -876,6 +892,7 @@ pub(crate) fn crease_for_mention( fn render_fold_icon_button( icon_path: SharedString, label: SharedString, + tooltip: Option, editor: WeakEntity, ) -> Arc, &mut App) -> AnyElement> { Arc::new({ @@ -886,6 +903,9 @@ fn render_fold_icon_button( MentionCrease::new(fold_id, icon_path.clone(), label.clone()) .is_toggled(is_in_text_selection) + .when_some(tooltip.clone(), |this, tooltip_text| { + this.tooltip(tooltip_text) + }) .into_any_element() } }) @@ -1018,6 +1038,9 @@ fn render_directory_contents(entries: Vec<(Arc, String, String)>) -> St fn render_mention_fold_button( label: SharedString, icon: SharedString, + tooltip: Option, + mention_uri: Option, + workspace: Option>, range: Range, mut loading_finished: postage::barrier::Receiver, image_task: Option, String>>>>, @@ -1037,6 +1060,9 @@ fn render_mention_fold_button( id: cx.entity_id(), label, icon, + tooltip, + mention_uri: mention_uri.clone(), + workspace: workspace.clone(), range, editor, loading: Some(loading), @@ -1050,6 +1076,9 @@ struct 
LoadingContext { id: EntityId, label: SharedString, icon: SharedString, + tooltip: Option, + mention_uri: Option, + workspace: Option>, range: Range, editor: WeakEntity, loading: Option>, @@ -1066,8 +1095,13 @@ impl Render for LoadingContext { let id = ElementId::from(("loading_context", self.id)); MentionCrease::new(id, self.icon.clone(), self.label.clone()) + .mention_uri(self.mention_uri.clone()) + .workspace(self.workspace.clone()) .is_toggled(is_in_text_selection) .is_loading(self.loading.is_some()) + .when_some(self.tooltip.clone(), |this, tooltip_text| { + this.tooltip(tooltip_text) + }) .when_some(self.image.clone(), |this, image_task| { this.image_preview(move |_, cx| { let image = image_task.peek().cloned().transpose().ok().flatten(); diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/message_editor.rs similarity index 84% rename from crates/agent_ui/src/acp/message_editor.rs rename to crates/agent_ui/src/message_editor.rs index 47847aef53cbd597c78cf329467a35ff1ac68978..6ce0b7e356dc75f1c3d4db0f318d1978a37d00cc 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -1,5 +1,5 @@ use crate::SendImmediately; -use crate::acp::AcpThreadHistory; +use crate::ThreadHistory; use crate::{ ChatWithFollow, completion_provider::{ @@ -51,13 +51,14 @@ pub struct MessageEditor { _parse_slash_command_task: Task<()>, } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Debug)] pub enum MessageEditorEvent { Send, SendImmediately, Cancel, Focus, LostFocus, + InputAttempted(Arc), } impl EventEmitter for MessageEditor {} @@ -107,7 +108,7 @@ impl MessageEditor { workspace: WeakEntity, project: WeakEntity, thread_store: Option>, - history: WeakEntity, + history: WeakEntity, prompt_store: Option>, prompt_capabilities: Rc>, available_commands: Rc>>, @@ -153,6 +154,7 @@ impl MessageEditor { Box::new(editor::actions::Copy), ) .action("Paste", Box::new(editor::actions::Paste)) + .action("Paste as Plain Text", 
Box::new(PasteRaw)) })) }); @@ -186,6 +188,18 @@ impl MessageEditor { subscriptions.push(cx.subscribe_in(&editor, window, { move |this, editor, event, window, cx| { + let input_attempted_text = match event { + EditorEvent::InputHandled { text, .. } => Some(text), + EditorEvent::InputIgnored { text } => Some(text), + _ => None, + }; + if let Some(text) = input_attempted_text + && editor.read(cx).read_only(cx) + && !text.is_empty() + { + cx.emit(MessageEditorEvent::InputAttempted(text.clone())); + } + if let EditorEvent::Edited { .. } = event && !editor.read(cx).read_only(cx) { @@ -403,7 +417,27 @@ impl MessageEditor { let text = self.editor.read(cx).text(cx); let available_commands = self.available_commands.borrow().clone(); let agent_name = self.agent_name.clone(); + let build_task = self.build_content_blocks(full_mention_content, cx); + + cx.spawn(async move |_, _cx| { + Self::validate_slash_commands(&text, &available_commands, &agent_name)?; + build_task.await + }) + } + + pub fn draft_contents(&self, cx: &mut Context) -> Task>> { + let build_task = self.build_content_blocks(false, cx); + cx.spawn(async move |_, _cx| { + let (blocks, _tracked_buffers) = build_task.await?; + Ok(blocks) + }) + } + fn build_content_blocks( + &self, + full_mention_content: bool, + cx: &mut Context, + ) -> Task, Vec>)>> { let contents = self .mention_set .update(cx, |store, cx| store.contents(full_mention_content, cx)); @@ -411,18 +445,16 @@ impl MessageEditor { let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context; cx.spawn(async move |_, cx| { - Self::validate_slash_commands(&text, &available_commands, &agent_name)?; - let contents = contents.await?; let mut all_tracked_buffers = Vec::new(); let result = editor.update(cx, |editor, cx| { + let text = editor.text(cx); let (mut ix, _) = text .char_indices() .find(|(_, c)| !c.is_whitespace()) .unwrap_or((0, '\0')); let mut chunks: Vec = Vec::new(); - let text = editor.text(cx); editor.display_map.update(cx, 
|map, cx| { let snapshot = map.snapshot(cx); for (crease_id, crease) in snapshot.crease_snapshot.creases() { @@ -690,6 +722,9 @@ impl MessageEditor { content_len, crease_text.into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -747,70 +782,93 @@ impl MessageEditor { _ => None, }) { - let path_style = workspace.read(cx).project().read(cx).path_style(cx); - - // Parse markdown mention links in format: [@name](uri) - let parsed_mentions = parse_mention_links(&clipboard_text, path_style); - - if !parsed_mentions.is_empty() { + if clipboard_text.contains("[@") { cx.stop_propagation(); - - let insertion_offset = self.editor.update(cx, |editor, cx| { + let selections_before = self.editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - editor.selections.newest_anchor().start.to_offset(&snapshot) + editor + .selections + .disjoint_anchors() + .iter() + .map(|selection| { + ( + selection.start.bias_left(&snapshot), + selection.end.bias_right(&snapshot), + ) + }) + .collect::>() }); - // Insert the raw text first self.editor.update(cx, |editor, cx| { editor.insert(&clipboard_text, window, cx); }); - let supports_images = self.prompt_capabilities.borrow().image; - let http_client = workspace.read(cx).client().http_client(); - - // Now create creases for each mention and load their content let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); - for (range, mention_uri) in parsed_mentions { - let start_offset = insertion_offset.0 + range.start; - let anchor = snapshot.anchor_before(MultiBufferOffset(start_offset)); - let content_len = range.end - range.start; - - let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, - content_len, - mention_uri.name().into(), - mention_uri.icon_path(cx), - None, - self.editor.clone(), - window, - cx, - ) else { - continue; - }; + let path_style 
= workspace.read(cx).project().read(cx).path_style(cx); + + let mut all_mentions = Vec::new(); + for (start_anchor, end_anchor) in selections_before { + let start_offset = start_anchor.to_offset(&snapshot); + let end_offset = end_anchor.to_offset(&snapshot); + + // Get the actual inserted text from the buffer (may differ due to auto-indent) + let inserted_text: String = + snapshot.text_for_range(start_offset..end_offset).collect(); + + let parsed_mentions = parse_mention_links(&inserted_text, path_style); + for (range, mention_uri) in parsed_mentions { + let mention_start_offset = MultiBufferOffset(start_offset.0 + range.start); + let anchor = snapshot.anchor_before(mention_start_offset); + let content_len = range.end - range.start; + all_mentions.push((anchor, content_len, mention_uri)); + } + } - // Create the confirmation task based on the mention URI type. - // This properly loads file content, fetches URLs, etc. - let task = self.mention_set.update(cx, |mention_set, cx| { - mention_set.confirm_mention_for_uri( - mention_uri.clone(), - supports_images, - http_client.clone(), + if !all_mentions.is_empty() { + let supports_images = self.prompt_capabilities.borrow().image; + let http_client = workspace.read(cx).client().http_client(); + + for (anchor, content_len, mention_uri) in all_mentions { + let Some((crease_id, tx)) = insert_crease_for_mention( + anchor.excerpt_id, + anchor.text_anchor, + content_len, + mention_uri.name().into(), + mention_uri.icon_path(cx), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), + None, + self.editor.clone(), + window, cx, - ) - }); - let task = cx - .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) - .shared(); + ) else { + continue; + }; - self.mention_set.update(cx, |mention_set, _cx| { - mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone()) - }); + // Create the confirmation task based on the mention URI type. 
+ // This properly loads file content, fetches URLs, etc. + let task = self.mention_set.update(cx, |mention_set, cx| { + mention_set.confirm_mention_for_uri( + mention_uri.clone(), + supports_images, + http_client.clone(), + cx, + ) + }); + let task = cx + .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) + .shared(); - // Drop the tx after inserting to signal the crease is ready - drop(tx); + self.mention_set.update(cx, |mention_set, _cx| { + mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone()) + }); + + // Drop the tx after inserting to signal the crease is ready + drop(tx); + } + return; } - return; } } @@ -960,6 +1018,9 @@ impl MessageEditor { content_len, mention_uri.name().into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -1172,13 +1233,45 @@ impl MessageEditor { message: Vec, window: &mut Window, cx: &mut Context, + ) { + self.clear(window, cx); + self.insert_message_blocks(message, false, window, cx); + } + + pub fn append_message( + &mut self, + message: Vec, + separator: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) { + if message.is_empty() { + return; + } + + if let Some(separator) = separator + && !separator.is_empty() + && !self.is_empty(cx) + { + self.editor.update(cx, |editor, cx| { + editor.insert(separator, window, cx); + }); + } + + self.insert_message_blocks(message, true, window, cx); + } + + fn insert_message_blocks( + &mut self, + message: Vec, + append_to_existing: bool, + window: &mut Window, + cx: &mut Context, ) { let Some(workspace) = self.workspace.upgrade() else { return; }; - self.clear(window, cx); - let path_style = workspace.read(cx).project().read(cx).path_style(cx); let mut text = String::new(); let mut mentions = Vec::new(); @@ -1252,19 +1345,40 @@ impl MessageEditor { } } - let snapshot = self.editor.update(cx, |editor, cx| { - editor.set_text(text, window, cx); - 
editor.buffer().read(cx).snapshot(cx) - }); + if text.is_empty() && mentions.is_empty() { + return; + } + + let insertion_start = if append_to_existing { + self.editor.read(cx).text(cx).len() + } else { + 0 + }; + + let snapshot = if append_to_existing { + self.editor.update(cx, |editor, cx| { + editor.insert(&text, window, cx); + editor.buffer().read(cx).snapshot(cx) + }) + } else { + self.editor.update(cx, |editor, cx| { + editor.set_text(text, window, cx); + editor.buffer().read(cx).snapshot(cx) + }) + }; for (range, mention_uri, mention) in mentions { - let anchor = snapshot.anchor_before(MultiBufferOffset(range.start)); + let adjusted_start = insertion_start + range.start; + let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start)); let Some((crease_id, tx)) = insert_crease_for_mention( anchor.excerpt_id, anchor.text_anchor, range.end - range.start, mention_uri.name().into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -1282,6 +1396,7 @@ impl MessageEditor { ) }); } + cx.notify(); } @@ -1289,6 +1404,16 @@ impl MessageEditor { self.editor.read(cx).text(cx) } + pub fn insert_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { + if text.is_empty() { + return; + } + + self.editor.update(cx, |editor, cx| { + editor.insert(text, window, cx); + }); + } + pub fn set_placeholder_text( &mut self, placeholder: &str, @@ -1300,7 +1425,7 @@ impl MessageEditor { }); } - #[cfg(test)] + #[cfg(any(test, feature = "test-support"))] pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.set_text(text, window, cx); @@ -1449,12 +1574,16 @@ mod tests { use acp_thread::{AgentSessionInfo, MentionUri}; use agent::{ThreadStore, outline}; use agent_client_protocol as acp; - use editor::{AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset}; + use editor::{ + AnchorRangeExt 
as _, Editor, EditorMode, MultiBufferOffset, SelectionEffects, + actions::Paste, + }; use fs::FakeFs; use futures::StreamExt as _; use gpui::{ - AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext, + AppContext, ClipboardItem, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, + VisualTestContext, }; use language_model::LanguageModelRegistry; use lsp::{CompletionContext, CompletionTriggerKind}; @@ -1466,11 +1595,11 @@ mod tests { use util::{path, paths::PathStyle, rel_path::rel_path}; use workspace::{AppState, Item, MultiWorkspace}; - use crate::acp::{ + use crate::completion_provider::{PromptCompletionProviderDelegate, PromptContextType}; + use crate::{ + connection_view::tests::init_test, message_editor::{Mention, MessageEditor, parse_mention_links}, - thread_view::tests::init_test, }; - use crate::completion_provider::{PromptCompletionProviderDelegate, PromptContextType}; #[test] fn test_parse_mention_links() { @@ -1577,8 +1706,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = None; - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -1691,8 +1820,8 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let workspace_handle = workspace.downgrade(); let message_editor = workspace.update_in(cx, |_, window, cx| { cx.new(|cx| { @@ -1847,8 +1976,8 @@ mod tests { let mut cx = 
VisualTestContext::from_window(window.into(), cx); let thread_store = None; - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![ acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"), @@ -2082,8 +2211,8 @@ mod tests { } let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { @@ -2578,8 +2707,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2679,8 +2808,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); // Create a thread metadata to insert as summary let thread_metadata = AgentSessionInfo { @@ -2761,8 +2890,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store 
= None; - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let thread_metadata = AgentSessionInfo { session_id: acp::SessionId::new("thread-123"), @@ -2821,8 +2950,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = None; - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2876,8 +3005,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2932,8 +3061,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2997,8 +3126,8 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let (message_editor, 
editor) = workspace.update_in(cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -3157,8 +3286,8 @@ mod tests { }); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); // Create a new `MessageEditor`. The `EditorMode::full()` has to be used // to ensure we have a fixed viewport, so we can eventually actually @@ -3278,8 +3407,8 @@ mod tests { let mut cx = VisualTestContext::from_window(window.into(), cx); let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = cx - .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -3333,4 +3462,341 @@ mod tests { assert_eq!(editor.text(cx), "😄😄@file"); }); } + + #[gpui::test] + async fn test_paste_mention_link_with_multiple_selections(cx: &mut TestAppContext) { + init_test(cx); + + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + editor::init(cx); + workspace::init(app_state.clone(), cx); + }); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project"), json!({"file.txt": "content"})) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/project").as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + + let mut cx = VisualTestContext::from_window(window.into(), cx); + + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + + 
let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { + let workspace_handle = cx.weak_entity(); + let message_editor = cx.new(|cx| { + MessageEditor::new( + workspace_handle, + project.downgrade(), + Some(thread_store), + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + max_lines: None, + min_lines: 1, + }, + window, + cx, + ) + }); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))), + true, + true, + None, + window, + cx, + ); + }); + message_editor.read(cx).focus_handle(cx).focus(window, cx); + let editor = message_editor.read(cx).editor().clone(); + (message_editor, editor) + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.set_text( + "AAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAA", + window, + cx, + ); + }); + + cx.run_until_parked(); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([ + MultiBufferOffset(0)..MultiBufferOffset(25), // First selection (large) + MultiBufferOffset(30)..MultiBufferOffset(55), // Second selection (newest) + ]); + }); + }); + + let mention_link = "[@f](file:///test.txt)"; + cx.write_to_clipboard(ClipboardItem::new_string(mention_link.into())); + + message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.paste(&Paste, window, cx); + }); + + let text = editor.update(&mut cx, |editor, cx| editor.text(cx)); + assert!( + text.contains("[@f](file:///test.txt)"), + "Expected mention link to be pasted, got: {}", + text + ); + } + + // Helper that creates a minimal MessageEditor inside a window, returning both + // the entity and the underlying VisualTestContext so callers can drive updates. 
+ async fn setup_message_editor( + cx: &mut TestAppContext, + ) -> (Entity, &mut VisualTestContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file.txt": ""})).await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.downgrade(), + None, + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + (message_editor, cx) + } + + #[gpui::test] + async fn test_set_message_plain_text(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "hello world".to_string(), + ))], + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!(text, "hello world"); + assert!(!message_editor.update(cx, |editor, cx| editor.is_empty(cx))); + } + + #[gpui::test] + async fn test_set_message_replaces_existing_content(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + // Set initial content. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "old content".to_string(), + ))], + window, + cx, + ); + }); + + // Replace with new content. 
+ message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "new content".to_string(), + ))], + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "new content", + "set_message should replace old content" + ); + } + + #[gpui::test] + async fn test_append_message_to_empty_editor(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "appended".to_string(), + ))], + Some("\n\n"), + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "appended", + "No separator should be inserted when the editor is empty" + ); + } + + #[gpui::test] + async fn test_append_message_to_non_empty_editor(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + // Seed initial content. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "initial".to_string(), + ))], + window, + cx, + ); + }); + + // Append with separator. 
+ message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "appended".to_string(), + ))], + Some("\n\n"), + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "initial\n\nappended", + "Separator should appear between existing and appended content" + ); + } + + #[gpui::test] + async fn test_append_message_preserves_mention_offset(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file.txt": "content"})) + .await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.downgrade(), + None, + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + + // Seed plain-text prefix so the editor is non-empty before appending. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "prefix text".to_string(), + ))], + window, + cx, + ); + }); + + // Append a message that contains a ResourceLink mention. 
+ message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::ResourceLink(acp::ResourceLink::new( + "file.txt", + "file:///project/file.txt", + ))], + Some("\n\n"), + window, + cx, + ); + }); + + cx.run_until_parked(); + + // The mention should be registered in the mention_set so that contents() + // will emit it as a structured block rather than plain text. + let mention_uris = + message_editor.update(cx, |editor, cx| editor.mention_set.read(cx).mentions()); + assert_eq!( + mention_uris.len(), + 1, + "Expected exactly one mention in the mention_set after append, got: {mention_uris:?}" + ); + + // The editor text should start with the prefix, then the separator, then + // the mention placeholder — confirming the offset was computed correctly. + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert!( + text.starts_with("prefix text\n\n"), + "Expected text to start with 'prefix text\\n\\n', got: {text:?}" + ); + } } diff --git a/crates/agent_ui/src/acp/mode_selector.rs b/crates/agent_ui/src/mode_selector.rs similarity index 100% rename from crates/agent_ui/src/acp/mode_selector.rs rename to crates/agent_ui/src/mode_selector.rs diff --git a/crates/agent_ui/src/acp/model_selector.rs b/crates/agent_ui/src/model_selector.rs similarity index 91% rename from crates/agent_ui/src/acp/model_selector.rs rename to crates/agent_ui/src/model_selector.rs index 43a39e61088219605d2ee7ab65e610b43137576c..89ed3e490b33ca83cbdab25cfce77fee7cf9ccb6 100644 --- a/crates/agent_ui/src/acp/model_selector.rs +++ b/crates/agent_ui/src/model_selector.rs @@ -23,7 +23,7 @@ use zed_actions::agent::OpenSettings; use crate::ui::{HoldForDefault, ModelSelectorFooter, ModelSelectorHeader, ModelSelectorListItem}; -pub type AcpModelSelector = Picker; +pub type ModelSelector = Picker; pub fn acp_model_selector( selector: Rc, @@ -31,26 +31,25 @@ pub fn acp_model_selector( fs: Arc, focus_handle: FocusHandle, window: &mut Window, - cx: &mut 
Context, -) -> AcpModelSelector { - let delegate = - AcpModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx); + cx: &mut Context, +) -> ModelSelector { + let delegate = ModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx); Picker::list(delegate, window, cx) .show_scrollbar(true) .width(rems(20.)) .max_height(Some(rems(20.).into())) } -enum AcpModelPickerEntry { +enum ModelPickerEntry { Separator(SharedString), Model(AgentModelInfo, bool), } -pub struct AcpModelPickerDelegate { +pub struct ModelPickerDelegate { selector: Rc, agent_server: Rc, fs: Arc, - filtered_entries: Vec, + filtered_entries: Vec, models: Option, selected_index: usize, selected_description: Option<(usize, SharedString, bool)>, @@ -61,21 +60,21 @@ pub struct AcpModelPickerDelegate { focus_handle: FocusHandle, } -impl AcpModelPickerDelegate { +impl ModelPickerDelegate { fn new( selector: Rc, agent_server: Rc, fs: Arc, focus_handle: FocusHandle, window: &mut Window, - cx: &mut Context, + cx: &mut Context, ) -> Self { let rx = selector.watch(cx); let refresh_models_task = { cx.spawn_in(window, { async move |this, cx| { async fn refresh( - this: &WeakEntity>, + this: &WeakEntity>, cx: &mut AsyncWindowContext, ) -> Result<()> { let (models_task, selected_model_task) = this.update(cx, |this, cx| { @@ -188,7 +187,7 @@ impl AcpModelPickerDelegate { // Keep the picker selection aligned with the newly-selected model if let Some(new_index) = self.filtered_entries.iter().position(|entry| { - matches!(entry, AcpModelPickerEntry::Model(model_info, _) if self.selected_model.as_ref().is_some_and(|selected| model_info.id == selected.id)) + matches!(entry, ModelPickerEntry::Model(model_info, _) if self.selected_model.as_ref().is_some_and(|selected| model_info.id == selected.id)) }) { self.set_selected_index(new_index, window, cx); } else { @@ -197,7 +196,7 @@ impl AcpModelPickerDelegate { } } -impl PickerDelegate for AcpModelPickerDelegate { +impl PickerDelegate 
for ModelPickerDelegate { type ListItem = AnyElement; fn match_count(&self) -> usize { @@ -213,15 +212,10 @@ impl PickerDelegate for AcpModelPickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { - Some(AcpModelPickerEntry::Model(_, _)) => true, - Some(AcpModelPickerEntry::Separator(_)) | None => false, + Some(ModelPickerEntry::Model(_, _)) => true, + Some(ModelPickerEntry::Separator(_)) | None => false, } } @@ -261,7 +255,7 @@ impl PickerDelegate for AcpModelPickerDelegate { .as_ref() .and_then(|selected| { this.delegate.filtered_entries.iter().position(|entry| { - if let AcpModelPickerEntry::Model(model_info, _) = entry { + if let ModelPickerEntry::Model(model_info, _) = entry { model_info.id == selected.id } else { false @@ -277,7 +271,7 @@ impl PickerDelegate for AcpModelPickerDelegate { } fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - if let Some(AcpModelPickerEntry::Model(model_info, _)) = + if let Some(ModelPickerEntry::Model(model_info, _)) = self.filtered_entries.get(self.selected_index) { if window.modifiers().secondary() { @@ -320,10 +314,10 @@ impl PickerDelegate for AcpModelPickerDelegate { cx: &mut Context>, ) -> Option { match self.filtered_entries.get(ix)? 
{ - AcpModelPickerEntry::Separator(title) => { + ModelPickerEntry::Separator(title) => { Some(ModelSelectorHeader::new(title, ix > 1).into_any_element()) } - AcpModelPickerEntry::Model(model_info, is_favorite) => { + ModelPickerEntry::Model(model_info, is_favorite) => { let is_selected = Some(model_info) == self.selected_model.as_ref(); let default_model = self.agent_server.default_model(cx); let is_default = default_model.as_ref() == Some(&model_info.id); @@ -434,7 +428,7 @@ impl PickerDelegate for AcpModelPickerDelegate { fn info_list_to_picker_entries( model_list: AgentModelList, favorites: &HashSet, -) -> Vec { +) -> Vec { let mut entries = Vec::new(); let all_models: Vec<_> = match &model_list { @@ -450,28 +444,28 @@ fn info_list_to_picker_entries( let has_favorites = !favorite_models.is_empty(); if has_favorites { - entries.push(AcpModelPickerEntry::Separator("Favorite".into())); + entries.push(ModelPickerEntry::Separator("Favorite".into())); for model in favorite_models { - entries.push(AcpModelPickerEntry::Model((*model).clone(), true)); + entries.push(ModelPickerEntry::Model((*model).clone(), true)); } } match model_list { AgentModelList::Flat(list) => { if has_favorites { - entries.push(AcpModelPickerEntry::Separator("All".into())); + entries.push(ModelPickerEntry::Separator("All".into())); } for model in list { let is_favorite = favorites.contains(&model.id); - entries.push(AcpModelPickerEntry::Model(model, is_favorite)); + entries.push(ModelPickerEntry::Model(model, is_favorite)); } } AgentModelList::Grouped(index_map) => { for (group_name, models) in index_map { - entries.push(AcpModelPickerEntry::Separator(group_name.0)); + entries.push(ModelPickerEntry::Separator(group_name.0)); for model in models { let is_favorite = favorites.contains(&model.id); - entries.push(AcpModelPickerEntry::Model(model, is_favorite)); + entries.push(ModelPickerEntry::Model(model, is_favorite)); } } } @@ -608,22 +602,22 @@ mod tests { .collect() } - fn 
get_entry_model_ids(entries: &[AcpModelPickerEntry]) -> Vec<&str> { + fn get_entry_model_ids(entries: &[ModelPickerEntry]) -> Vec<&str> { entries .iter() .filter_map(|entry| match entry { - AcpModelPickerEntry::Model(info, _) => Some(info.id.0.as_ref()), + ModelPickerEntry::Model(info, _) => Some(info.id.0.as_ref()), _ => None, }) .collect() } - fn get_entry_labels(entries: &[AcpModelPickerEntry]) -> Vec<&str> { + fn get_entry_labels(entries: &[ModelPickerEntry]) -> Vec<&str> { entries .iter() .map(|entry| match entry { - AcpModelPickerEntry::Model(info, _) => info.id.0.as_ref(), - AcpModelPickerEntry::Separator(s) => &s, + ModelPickerEntry::Model(info, _) => info.id.0.as_ref(), + ModelPickerEntry::Separator(s) => &s, }) .collect() } @@ -671,7 +665,7 @@ mod tests { assert!(matches!( entries.first(), - Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite" + Some(ModelPickerEntry::Separator(s)) if s == "Favorite" )); let model_ids = get_entry_model_ids(&entries); @@ -687,7 +681,7 @@ mod tests { assert!(matches!( entries.first(), - Some(AcpModelPickerEntry::Separator(s)) if s == "zed" + Some(ModelPickerEntry::Separator(s)) if s == "zed" )); } @@ -702,7 +696,7 @@ mod tests { let entries = info_list_to_picker_entries(models, &favorites); for entry in &entries { - if let AcpModelPickerEntry::Model(info, is_favorite) = entry { + if let ModelPickerEntry::Model(info, is_favorite) = entry { if info.id.0.as_ref() == "zed/claude" { assert!(is_favorite, "zed/claude should be a favorite"); } else { @@ -789,12 +783,12 @@ mod tests { assert!(matches!( entries.first(), - Some(AcpModelPickerEntry::Separator(s)) if s == "Favorite" + Some(ModelPickerEntry::Separator(s)) if s == "Favorite" )); assert!(entries.iter().any(|e| matches!( e, - AcpModelPickerEntry::Separator(s) if s == "All" + ModelPickerEntry::Separator(s) if s == "All" ))); } @@ -838,7 +832,7 @@ mod tests { let entries = info_list_to_picker_entries(models, &favorites); for entry in &entries { - if let 
AcpModelPickerEntry::Model(info, is_favorite) = entry { + if let ModelPickerEntry::Model(info, is_favorite) = entry { if info.id.0.as_ref() == "favorite-model" { assert!(*is_favorite, "favorite-model should have is_favorite=true"); } else if info.id.0.as_ref() == "regular-model" { diff --git a/crates/agent_ui/src/acp/model_selector_popover.rs b/crates/agent_ui/src/model_selector_popover.rs similarity index 90% rename from crates/agent_ui/src/acp/model_selector_popover.rs rename to crates/agent_ui/src/model_selector_popover.rs index 941a84faa8782603eb448b3296b99b7d41ab77a0..257337b6b0b8a39645bc38b4d814b250d7b5e1f9 100644 --- a/crates/agent_ui/src/acp/model_selector_popover.rs +++ b/crates/agent_ui/src/model_selector_popover.rs @@ -7,20 +7,20 @@ use gpui::{Entity, FocusHandle}; use picker::popover_menu::PickerPopoverMenu; use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*}; -use crate::acp::{AcpModelSelector, model_selector::acp_model_selector}; use crate::ui::ModelSelectorTooltip; +use crate::{ModelSelector, model_selector::acp_model_selector}; -pub struct AcpModelSelectorPopover { - selector: Entity, - menu_handle: PopoverMenuHandle, +pub struct ModelSelectorPopover { + selector: Entity, + menu_handle: PopoverMenuHandle, } -impl AcpModelSelectorPopover { +impl ModelSelectorPopover { pub(crate) fn new( selector: Rc, agent_server: Rc, fs: Arc, - menu_handle: PopoverMenuHandle, + menu_handle: PopoverMenuHandle, focus_handle: FocusHandle, window: &mut Window, cx: &mut Context, @@ -48,7 +48,7 @@ impl AcpModelSelectorPopover { } } -impl Render for AcpModelSelectorPopover { +impl Render for ModelSelectorPopover { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let selector = self.selector.read(cx); let model = selector.delegate.active_model(); diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 
45d7232e0dff8b2ab1056b522b5994e11236d843..926549c22f88bcb0937dddf7c3ff1b32060ed297 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -443,12 +443,7 @@ impl PickerDelegate for ProfilePickerDelegate { cx.notify(); } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(ProfilePickerEntry::Profile(_)) => true, Some(ProfilePickerEntry::Header(_)) | None => false, diff --git a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index 8ee59a0a096172ad9a81983f3517226e824c43e7..3df3c1faaed9e02b659bc75b09257e81e96ebc03 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -1,5 +1,5 @@ use crate::{ - acp::AcpThreadHistory, + ThreadHistory, context::load_context, inline_prompt_editor::{ CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId, @@ -64,7 +64,7 @@ impl TerminalInlineAssistant { project: WeakEntity, thread_store: Entity, prompt_store: Option>, - history: WeakEntity, + history: WeakEntity, initial_prompt: Option, window: &mut Window, cx: &mut App, @@ -276,6 +276,7 @@ impl TerminalInlineAssistant { temperature, thinking_allowed: false, thinking_effort: None, + speed: None, } })) } diff --git a/crates/agent_ui/src/test_support.rs b/crates/agent_ui/src/test_support.rs new file mode 100644 index 0000000000000000000000000000000000000000..05a6b0925fb9151cc18d7096c8bf4f2674054073 --- /dev/null +++ b/crates/agent_ui/src/test_support.rs @@ -0,0 +1,98 @@ +use acp_thread::{AgentConnection, StubAgentConnection}; +use agent_client_protocol as acp; +use agent_servers::{AgentServer, AgentServerDelegate}; +use gpui::{Entity, SharedString, Task, TestAppContext, VisualTestContext}; +use settings::SettingsStore; +use std::any::Any; 
+use std::rc::Rc; + +use crate::AgentPanel; +use crate::agent_panel; + +pub struct StubAgentServer { + connection: C, +} + +impl StubAgentServer { + pub fn new(connection: C) -> Self { + Self { connection } + } +} + +impl StubAgentServer { + pub fn default_response() -> Self { + let conn = StubAgentConnection::new(); + conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Default response".into()), + )]); + Self::new(conn) + } +} + +impl AgentServer for StubAgentServer +where + C: 'static + AgentConnection + Send + Clone, +{ + fn logo(&self) -> ui::IconName { + ui::IconName::Ai + } + + fn name(&self) -> SharedString { + "Test".into() + } + + fn connect( + &self, + _delegate: AgentServerDelegate, + _cx: &mut gpui::App, + ) -> Task>> { + Task::ready(Ok(Rc::new(self.connection.clone()))) + } + + fn into_any(self: Rc) -> Rc { + self + } +} + +pub fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + release_channel::init("0.0.0".parse().unwrap(), cx); + agent_panel::init(cx); + }); +} + +pub fn open_thread_with_connection( + panel: &Entity, + connection: StubAgentConnection, + cx: &mut VisualTestContext, +) { + panel.update_in(cx, |panel, window, cx| { + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::new(connection)), + window, + cx, + ); + }); + cx.run_until_parked(); +} + +pub fn send_message(panel: &Entity, cx: &mut VisualTestContext) { + let thread_view = panel.read_with(cx, |panel, cx| panel.as_active_thread_view(cx).unwrap()); + let message_editor = thread_view.read_with(cx, |view, _cx| view.message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + thread_view.update_in(cx, |view, window, cx| view.send(window, cx)); + cx.run_until_parked(); +} + +pub fn active_session_id(panel: 
&Entity, cx: &VisualTestContext) -> acp::SessionId { + panel.read_with(cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + thread.read(cx).session_id().clone() + }) +} diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index b878be82e3896733f1eef8d6442cac901366c4a2..13764bd655c23176b3aa016f36eae193e16f92de 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -1495,7 +1495,7 @@ impl TextThreadEditor { return; }; - // Get buffer info for the delegate call (even if empty, AcpThreadView ignores these + // Get buffer info for the delegate call (even if empty, ThreadView ignores these // params and calls insert_selections which handles both terminal and buffer) if let Some((selections, buffer)) = maybe!({ let editor = workspace diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/thread_history.rs similarity index 97% rename from crates/agent_ui/src/acp/thread_history.rs rename to crates/agent_ui/src/thread_history.rs index 76f981b8847a191d66c173df000fdbf619d62239..8f8488cb94f94e036b37ef31c9c588740cd6cf02 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/thread_history.rs @@ -1,4 +1,4 @@ -use crate::acp::AcpServerView; +use crate::ConnectionView; use crate::{AgentPanel, RemoveHistory, RemoveSelectedThread}; use acp_thread::{AgentSessionInfo, AgentSessionList, AgentSessionListRequest, SessionListUpdate}; use agent_client_protocol as acp; @@ -27,7 +27,7 @@ fn thread_title(entry: &AgentSessionInfo) -> &SharedString { .unwrap_or(DEFAULT_TITLE) } -pub struct AcpThreadHistory { +pub struct ThreadHistory { session_list: Option>, sessions: Vec, scroll_handle: UniformListScrollHandle, @@ -70,9 +70,9 @@ pub enum ThreadHistoryEvent { Open(AgentSessionInfo), } -impl EventEmitter for AcpThreadHistory {} +impl EventEmitter for ThreadHistory {} -impl AcpThreadHistory { +impl ThreadHistory { pub fn new( 
session_list: Option>, window: &mut Window, @@ -720,13 +720,13 @@ impl AcpThreadHistory { } } -impl Focusable for AcpThreadHistory { +impl Focusable for ThreadHistory { fn focus_handle(&self, cx: &App) -> FocusHandle { self.search_editor.focus_handle(cx) } } -impl Render for AcpThreadHistory { +impl Render for ThreadHistory { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let has_no_history = self.is_empty(); @@ -860,17 +860,17 @@ impl Render for AcpThreadHistory { } #[derive(IntoElement)] -pub struct AcpHistoryEntryElement { +pub struct HistoryEntryElement { entry: AgentSessionInfo, - thread_view: WeakEntity, + thread_view: WeakEntity, selected: bool, hovered: bool, supports_delete: bool, on_hover: Box, } -impl AcpHistoryEntryElement { - pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity) -> Self { +impl HistoryEntryElement { + pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity) -> Self { Self { entry, thread_view, @@ -897,7 +897,7 @@ impl AcpHistoryEntryElement { } } -impl RenderOnce for AcpHistoryEntryElement { +impl RenderOnce for HistoryEntryElement { fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { let id = ElementId::Name(self.entry.session_id.0.clone().into()); let title = thread_title(&self.entry).clone(); @@ -1240,7 +1240,7 @@ mod tests { )); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1264,7 +1264,7 @@ mod tests { )); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); session_list.clear_requested_cursors(); @@ -1301,7 +1301,7 @@ mod tests { )); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + 
ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1334,7 +1334,7 @@ mod tests { )); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1365,7 +1365,7 @@ mod tests { )); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1410,7 +1410,7 @@ mod tests { ); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); session_list.clear_requested_cursors(); @@ -1442,7 +1442,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1478,7 +1478,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1511,7 +1511,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1547,7 +1547,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + 
ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1587,7 +1587,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); @@ -1624,7 +1624,7 @@ mod tests { let session_list = Rc::new(TestSessionList::new(sessions)); let (history, cx) = cx.add_window_view(|window, cx| { - AcpThreadHistory::new(Some(session_list.clone()), window, cx) + ThreadHistory::new(Some(session_list.clone()), window, cx) }); cx.run_until_parked(); diff --git a/crates/agent_ui/src/ui/acp_onboarding_modal.rs b/crates/agent_ui/src/ui/acp_onboarding_modal.rs index e48a36bd5af3eff578e230195dc2247900977173..23f3eadc4b259aa854f6c2cbb6bb3a68ec46deb5 100644 --- a/crates/agent_ui/src/ui/acp_onboarding_modal.rs +++ b/crates/agent_ui/src/ui/acp_onboarding_modal.rs @@ -1,8 +1,8 @@ -use client::zed_urls; use gpui::{ ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render, linear_color_stop, linear_gradient, }; +use project::agent_server_store::GEMINI_NAME; use ui::{TintColor, Vector, VectorName, prelude::*}; use workspace::{ModalView, Workspace}; @@ -37,7 +37,13 @@ impl AcpOnboardingModal { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.new_agent_thread(AgentType::Gemini, window, cx); + panel.new_agent_thread( + AgentType::Custom { + name: GEMINI_NAME.into(), + }, + window, + cx, + ); }); } }); @@ -47,11 +53,11 @@ impl AcpOnboardingModal { acp_onboarding_event!("Open Panel Clicked"); } - fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context) { - cx.open_url(&zed_urls::external_agents_docs(cx)); + fn open_agent_registry(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { + window.dispatch_action(Box::new(zed_actions::AcpRegistry), cx); 
cx.notify(); - acp_onboarding_event!("Documentation Link Clicked"); + acp_onboarding_event!("Open Agent Registry Clicked"); } fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context) { @@ -197,7 +203,7 @@ impl Render for AcpOnboardingModal { .icon_size(IconSize::Indicator) .icon_color(Color::Muted) .full_width() - .on_click(cx.listener(Self::view_docs)); + .on_click(cx.listener(Self::open_agent_registry)); let close_button = h_flex().absolute().top_2().right_2().child( IconButton::new("cancel", IconName::Close).on_click(cx.listener( diff --git a/crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs b/crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs index 5a90d430295c921ea6e11f14921694990cbbc27c..9e499690efcb797e28f32ca8b3bd0f2c2f0da9db 100644 --- a/crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs +++ b/crates/agent_ui/src/ui/claude_agent_onboarding_modal.rs @@ -1,8 +1,8 @@ -use client::zed_urls; use gpui::{ ClickEvent, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, MouseDownEvent, Render, linear_color_stop, linear_gradient, }; +use project::agent_server_store::CLAUDE_AGENT_NAME; use ui::{TintColor, Vector, VectorName, prelude::*}; use workspace::{ModalView, Workspace}; @@ -37,7 +37,13 @@ impl ClaudeCodeOnboardingModal { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { - panel.new_agent_thread(AgentType::ClaudeAgent, window, cx); + panel.new_agent_thread( + AgentType::Custom { + name: CLAUDE_AGENT_NAME.into(), + }, + window, + cx, + ); }); } }); @@ -47,8 +53,8 @@ impl ClaudeCodeOnboardingModal { claude_agent_onboarding_event!("Open Panel Clicked"); } - fn view_docs(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context) { - cx.open_url(&zed_urls::external_agents_docs(cx)); + fn view_docs(&mut self, _: &ClickEvent, window: &mut Window, cx: &mut Context) { + window.dispatch_action(Box::new(zed_actions::AcpRegistry), cx); cx.notify(); claude_agent_onboarding_event!("Documentation Link 
Clicked"); diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 013d6659493bd0930d132a662d374f60ca47961f..0a61b8e4ef2ec69714f158a72f83cc0528cc8a8f 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -1,17 +1,28 @@ -use std::time::Duration; +use std::{ops::RangeInclusive, path::PathBuf, time::Duration}; -use gpui::{Animation, AnimationExt, AnyView, IntoElement, Window, pulsating_between}; +use acp_thread::MentionUri; +use agent_client_protocol as acp; +use editor::{Editor, SelectionEffects, scroll::Autoscroll}; +use gpui::{ + Animation, AnimationExt, AnyView, Context, IntoElement, WeakEntity, Window, pulsating_between, +}; +use prompt_store::PromptId; +use rope::Point; use settings::Settings; use theme::ThemeSettings; -use ui::{ButtonLike, TintColor, prelude::*}; +use ui::{ButtonLike, TintColor, Tooltip, prelude::*}; +use workspace::{OpenOptions, Workspace}; #[derive(IntoElement)] pub struct MentionCrease { id: ElementId, icon: SharedString, label: SharedString, + mention_uri: Option, + workspace: Option>, is_toggled: bool, is_loading: bool, + tooltip: Option, image_preview: Option AnyView + 'static>>, } @@ -25,12 +36,25 @@ impl MentionCrease { id: id.into(), icon: icon.into(), label: label.into(), + mention_uri: None, + workspace: None, is_toggled: false, is_loading: false, + tooltip: None, image_preview: None, } } + pub fn mention_uri(mut self, mention_uri: Option) -> Self { + self.mention_uri = mention_uri; + self + } + + pub fn workspace(mut self, workspace: Option>) -> Self { + self.workspace = workspace; + self + } + pub fn is_toggled(mut self, is_toggled: bool) -> Self { self.is_toggled = is_toggled; self @@ -41,6 +65,11 @@ impl MentionCrease { self } + pub fn tooltip(mut self, tooltip: impl Into) -> Self { + self.tooltip = Some(tooltip.into()); + self + } + pub fn image_preview( mut self, builder: impl Fn(&mut Window, &mut App) -> AnyView + 'static, @@ -55,6 
+84,9 @@ impl RenderOnce for MentionCrease { let settings = ThemeSettings::get_global(cx); let font_size = settings.agent_buffer_font_size(cx); let buffer_font = settings.buffer_font.clone(); + let is_loading = self.is_loading; + let tooltip = self.tooltip; + let image_preview = self.image_preview; let button_height = DefiniteLength::Absolute(AbsoluteLength::Pixels( px(window.line_height().into()) - px(1.), @@ -66,9 +98,14 @@ impl RenderOnce for MentionCrease { .height(button_height) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .toggle_state(self.is_toggled) - .when_some(self.image_preview, |this, image_preview| { - this.hoverable_tooltip(image_preview) - }) + .when_some( + self.mention_uri.clone().zip(self.workspace.clone()), + |this, (mention_uri, workspace)| { + this.on_click(move |_event, window, cx| { + open_mention_uri(mention_uri.clone(), &workspace, window, cx); + }) + }, + ) .child( h_flex() .pb_px() @@ -82,7 +119,7 @@ impl RenderOnce for MentionCrease { ) .child(self.label.clone()) .map(|this| { - if self.is_loading { + if is_loading { this.with_animation( "loading-context-crease", Animation::new(Duration::from_secs(2)) @@ -96,5 +133,179 @@ impl RenderOnce for MentionCrease { } }), ) + .map(|button| { + if let Some(image_preview) = image_preview { + button.hoverable_tooltip(image_preview) + } else { + button.when_some(tooltip, |this, tooltip_text| { + this.tooltip(Tooltip::text(tooltip_text)) + }) + } + }) } } + +fn open_mention_uri( + mention_uri: MentionUri, + workspace: &WeakEntity, + window: &mut Window, + cx: &mut App, +) { + let Some(workspace) = workspace.upgrade() else { + return; + }; + + workspace.update(cx, |workspace, cx| match mention_uri { + MentionUri::File { abs_path } => { + open_file(workspace, abs_path, None, window, cx); + } + MentionUri::Symbol { + abs_path, + line_range, + .. 
+ } + | MentionUri::Selection { + abs_path: Some(abs_path), + line_range, + } => { + open_file(workspace, abs_path, Some(line_range), window, cx); + } + MentionUri::Directory { abs_path } => { + reveal_in_project_panel(workspace, abs_path, cx); + } + MentionUri::Thread { id, name } => { + open_thread(workspace, id, name, window, cx); + } + MentionUri::TextThread { .. } => {} + MentionUri::Rule { id, .. } => { + open_rule(workspace, id, window, cx); + } + MentionUri::Fetch { url } => { + cx.open_url(url.as_str()); + } + MentionUri::PastedImage + | MentionUri::Selection { abs_path: None, .. } + | MentionUri::Diagnostics { .. } + | MentionUri::TerminalSelection { .. } + | MentionUri::GitDiff { .. } => {} + }); +} + +fn open_file( + workspace: &mut Workspace, + abs_path: PathBuf, + line_range: Option>, + window: &mut Window, + cx: &mut Context, +) { + let project = workspace.project(); + + if let Some(project_path) = + project.update(cx, |project, cx| project.find_project_path(&abs_path, cx)) + { + let item = workspace.open_path(project_path, None, true, window, cx); + if let Some(line_range) = line_range { + window + .spawn(cx, async move |cx| { + let Some(editor) = item.await?.downcast::() else { + return Ok(()); + }; + editor + .update_in(cx, |editor, window, cx| { + let range = Point::new(*line_range.start(), 0) + ..Point::new(*line_range.start(), 0); + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |selections| selections.select_ranges(vec![range]), + ); + }) + .ok(); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } else { + item.detach_and_log_err(cx); + } + } else if abs_path.exists() { + workspace + .open_abs_path( + abs_path, + OpenOptions { + focus: Some(true), + ..Default::default() + }, + window, + cx, + ) + .detach_and_log_err(cx); + } +} + +fn reveal_in_project_panel( + workspace: &mut Workspace, + abs_path: PathBuf, + cx: &mut Context, +) { + let project = workspace.project(); + let Some(entry_id) = 
project.update(cx, |project, cx| { + let path = project.find_project_path(&abs_path, cx)?; + project.entry_for_path(&path, cx).map(|entry| entry.id) + }) else { + return; + }; + + project.update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel(entry_id)); + }); +} + +fn open_thread( + workspace: &mut Workspace, + id: acp::SessionId, + name: String, + window: &mut Window, + cx: &mut Context, +) { + use crate::AgentPanel; + use acp_thread::AgentSessionInfo; + + let Some(panel) = workspace.panel::(cx) else { + return; + }; + + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + AgentSessionInfo { + session_id: id, + cwd: None, + title: Some(name.into()), + updated_at: None, + meta: None, + }, + window, + cx, + ) + }); +} + +fn open_rule( + _workspace: &mut Workspace, + id: PromptId, + window: &mut Window, + cx: &mut Context, +) { + use zed_actions::assistant::OpenRulesLibrary; + + let PromptId::User { uuid } = id else { + return; + }; + + window.dispatch_action( + Box::new(OpenRulesLibrary { + prompt_to_select: Some(uuid.0), + }), + cx, + ); +} diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index bc2516b8b0f53e79a03fca40f6ce4dc5b564efc1..6bff2be4c15841de597309b626e768bbf79e880a 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -906,11 +906,17 @@ pub struct ImageSource { pub data: String, } +fn is_false(value: &bool) -> bool { + !value +} + #[derive(Debug, Serialize, Deserialize)] pub struct Tool { pub name: String, pub description: String, pub input_schema: serde_json::Value, + #[serde(default, skip_serializing_if = "is_false")] + pub eager_input_streaming: bool, } #[derive(Debug, Serialize, Deserialize)] @@ -971,6 +977,8 @@ pub struct Request { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub stop_sequences: Vec, #[serde(default, skip_serializing_if = "Option::is_none")] + pub speed: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub 
temperature: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub top_k: Option, @@ -978,6 +986,14 @@ pub struct Request { pub top_p: Option, } +#[derive(Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum Speed { + #[default] + Standard, + Fast, +} + #[derive(Debug, Serialize, Deserialize)] struct StreamingRequest { #[serde(flatten)] diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index 18b37808b936e354614f6681bbcb263b184f832c..34007868f9f128fa80f09f884ccbaf57ffd103c1 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -2275,6 +2275,7 @@ impl TextThread { temperature: model.and_then(|model| AgentSettings::temperature_for_model(model, cx)), thinking_allowed: true, thinking_effort: None, + speed: None, }; for message in self.messages(cx) { if message.status != MessageStatus::Done { diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index 3139eb56c7e30555c48fe0be329c55d472b3f8eb..f3898265e500dd40602c9877b5e4c0980932a81a 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -30,4 +30,4 @@ thiserror.workspace = true util.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] -libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } +libwebrtc.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index b506b1b31f7e1840a8a78219c8843687ff85cd2c..53fac7beac2475d06f4a0f886536942308f9976c 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -990,7 +990,7 @@ async fn install_release_macos( }; let output = new_command("rsync") - .args(["-av", "--delete"]) + .args(["-av", "--delete", "--exclude", "Icon?"]) .arg(&mounted_app_path) 
.arg(&running_app_path) .output() diff --git a/crates/auto_update_helper/Cargo.toml b/crates/auto_update_helper/Cargo.toml index 73c38d80dd12e9c42daa42b7e6f2c9d6975cf47b..aa5bf6ac40b0e1ab20cbde510be5d7f389c7ade8 100644 --- a/crates/auto_update_helper/Cargo.toml +++ b/crates/auto_update_helper/Cargo.toml @@ -19,6 +19,7 @@ log.workspace = true simplelog.workspace = true [target.'cfg(target_os = "windows")'.dependencies] +scopeguard = "1.2" windows.workspace = true [target.'cfg(target_os = "windows")'.dev-dependencies] diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 076e11fb4eef1e5c53e2bdc290be7117330c3e61..7821c908c40873637c4ac3993c320416e2a4b978 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -1,13 +1,22 @@ use std::{ + ffi::OsStr, + os::windows::ffi::OsStrExt, path::Path, sync::LazyLock, time::{Duration, Instant}, }; use anyhow::{Context as _, Result}; -use windows::Win32::{ - Foundation::{HWND, LPARAM, WPARAM}, - UI::WindowsAndMessaging::PostMessageW, +use windows::{ + Win32::{ + Foundation::{HWND, LPARAM, WPARAM}, + System::RestartManager::{ + CCH_RM_SESSION_KEY, RmEndSession, RmGetList, RmRegisterResources, RmShutdown, + RmStartSession, + }, + UI::WindowsAndMessaging::PostMessageW, + }, + core::{PCWSTR, PWSTR}, }; use crate::windows_impl::WM_JOB_UPDATED; @@ -262,9 +271,106 @@ pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| { ] }); +/// Attempts to use Windows Restart Manager to release file handles held by other processes +/// (e.g., Explorer.exe) on the files we need to move during the update. +/// +/// This is a best-effort operation - if it fails, we'll still try the update and rely on +/// the retry logic. 
+fn release_file_handles(app_dir: &Path) -> Result<()> { + // Files that commonly get locked by Explorer or other processes + let files_to_release = [ + app_dir.join("Zed.exe"), + app_dir.join("bin\\Zed.exe"), + app_dir.join("bin\\zed"), + app_dir.join("conpty.dll"), + ]; + + log::info!("Attempting to release file handles using Restart Manager..."); + + let mut session: u32 = 0; + let mut session_key = [0u16; CCH_RM_SESSION_KEY as usize + 1]; + + // Start a Restart Manager session + let err = unsafe { + RmStartSession( + &mut session, + Some(0), + PWSTR::from_raw(session_key.as_mut_ptr()), + ) + }; + if err.is_err() { + anyhow::bail!("RmStartSession failed: {err:?}"); + } + + // Ensure we end the session when done + let _session_guard = scopeguard::guard(session, |s| { + let _ = unsafe { RmEndSession(s) }; + }); + + // Convert paths to wide strings for Windows API + let wide_paths: Vec> = files_to_release + .iter() + .filter(|p| p.exists()) + .map(|p| { + OsStr::new(p) + .encode_wide() + .chain(std::iter::once(0)) + .collect() + }) + .collect(); + + if wide_paths.is_empty() { + log::info!("No files to release handles for"); + return Ok(()); + } + + let pcwstr_paths: Vec = wide_paths + .iter() + .map(|p| PCWSTR::from_raw(p.as_ptr())) + .collect(); + + // Register the files we want to modify + let err = unsafe { RmRegisterResources(session, Some(&pcwstr_paths), None, None) }; + if err.is_err() { + anyhow::bail!("RmRegisterResources failed: {err:?}"); + } + + // Check if any processes are using these files + let mut needed: u32 = 0; + let mut count: u32 = 0; + let mut reboot_reasons: u32 = 0; + let _ = unsafe { RmGetList(session, &mut needed, &mut count, None, &mut reboot_reasons) }; + + if needed == 0 { + log::info!("No processes are holding handles to the files"); + return Ok(()); + } + + log::info!( + "{} process(es) are holding handles to the files, requesting release...", + needed + ); + + // Request processes to release their handles + // RmShutdown with flags=0 
asks applications to release handles gracefully + // For Explorer, this typically releases icon cache handles without closing Explorer + let err = unsafe { RmShutdown(session, 0, None) }; + if err.is_err() { + anyhow::bail!("RmShutdown failed: {:?}", err); + } + + log::info!("Successfully requested handle release"); + Ok(()) +} + pub(crate) fn perform_update(app_dir: &Path, hwnd: Option, launch: bool) -> Result<()> { let hwnd = hwnd.map(|ptr| HWND(ptr as _)); + // Try to release file handles before starting the update + if let Err(e) = release_file_handles(app_dir) { + log::warn!("Restart Manager failed (will continue anyway): {}", e); + } + let mut last_successful_job = None; 'outer: for (i, job) in JOBS.iter().enumerate() { let start = Instant::now(); @@ -279,19 +385,22 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option, launch: bool) unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? }; break; } - Err(err) => { - // Check if it's a "not found" error - let io_err = err.downcast_ref::().unwrap(); - if io_err.kind() == std::io::ErrorKind::NotFound { - log::warn!("File or folder not found."); - last_successful_job = Some(i); - unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? 
}; - break; + Err(err) => match err.downcast_ref::() { + Some(io_err) => match io_err.kind() { + std::io::ErrorKind::NotFound => { + log::error!("Operation failed with file not found, aborting: {}", err); + break 'outer; + } + _ => { + log::error!("Operation failed (retrying): {}", err); + std::thread::sleep(Duration::from_millis(50)); + } + }, + None => { + log::error!("Operation failed with unexpected error, aborting: {}", err); + break 'outer; } - - log::error!("Operation failed: {} ({:?})", err, io_err.kind()); - std::thread::sleep(Duration::from_millis(50)); - } + }, } } } diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 2c9a68d5526f2cb0f03bc3da7ab611233091b143..82ab2736b8bc207aa30952ae9f79f161eb9db8db 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1721,7 +1721,7 @@ impl BufferDiff { if let Some(language_registry) = language_registry { base_text.set_language_registry(language_registry); } - base_text.set_language(language, cx); + base_text.set_language_async(language, cx); base_text.parsing_idle() }); cx.spawn(async move |this, cx| { @@ -1753,6 +1753,7 @@ impl BufferDiff { let should_compare_hunks = update.base_text_edits.is_some() || !base_text_changed; let parsing_idle = if let Some(diff) = update.base_text_edits { state.base_text.update(cx, |base_text, cx| { + base_text.set_sync_parse_timeout(None); base_text.set_capability(Capability::ReadWrite, cx); base_text.apply_diff(diff, cx); base_text.set_capability(Capability::ReadOnly, cx); @@ -1760,6 +1761,7 @@ impl BufferDiff { }) } else if update.base_text_changed { state.base_text.update(cx, |base_text, cx| { + base_text.set_sync_parse_timeout(None); base_text.set_capability(Capability::ReadWrite, cx); base_text.set_text(new_state.base_text.clone(), cx); base_text.set_capability(Capability::ReadOnly, cx); diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 
c4966443978e1eaf86192171de4c765cac41d5c7..8b6f30a3cd3bf1d61f76a9b39c99a7b51a30ea4f 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -22,6 +22,7 @@ pub(crate) fn init(client: &AnyProtoClient) { pub struct ChannelBuffer { pub channel_id: ChannelId, connected: bool, + rejoining: bool, collaborators: HashMap, user_store: Entity, channel_store: Entity, @@ -84,6 +85,7 @@ impl ChannelBuffer { buffer_epoch: response.epoch, client, connected: true, + rejoining: false, collaborators: Default::default(), acknowledge_task: None, channel_id: channel.id, @@ -111,6 +113,7 @@ impl ChannelBuffer { pub fn connected(&mut self, cx: &mut Context) { self.connected = true; + self.rejoining = false; if self.subscription.is_none() { let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else { return; @@ -120,6 +123,10 @@ impl ChannelBuffer { } } + pub(crate) fn set_rejoining(&mut self, rejoining: bool) { + self.rejoining = rejoining; + } + pub fn remote_id(&self, cx: &App) -> BufferId { self.buffer.read(cx).remote_id() } @@ -204,6 +211,9 @@ impl ChannelBuffer { return; } let operation = language::proto::serialize_operation(operation); + if self.rejoining { + return; + } self.client .send(proto::UpdateChannelBuffer { channel_id: self.channel_id.0, @@ -263,6 +273,7 @@ impl ChannelBuffer { log::info!("channel buffer {} disconnected", self.channel_id); if self.connected { self.connected = false; + self.rejoining = false; self.subscription.take(); cx.emit(ChannelBufferEvent::Disconnected); cx.notify() diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 62e34210ebac2dd0e017b415adb094857bb11025..a9357a765a75443e18efb1e6f31cdfab313ebcce 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -855,12 +855,18 @@ impl ChannelStore { if let OpenEntityHandle::Open(buffer) = buffer && let Some(buffer) = buffer.upgrade() { - let channel_buffer = 
buffer.read(cx); - let buffer = channel_buffer.buffer().read(cx); - buffer_versions.push(proto::ChannelBufferVersion { - channel_id: channel_buffer.channel_id.0, - epoch: channel_buffer.epoch(), - version: language::proto::serialize_version(&buffer.version()), + buffer.update(cx, |channel_buffer, cx| { + // Block on_buffer_update from sending UpdateChannelBuffer messages + // until the rejoin completes. This prevents a race condition where + // edits made during the rejoin async gap could inflate the server + // version, causing offline edits to be filtered out by serialize_ops. + channel_buffer.set_rejoining(true); + let inner_buffer = channel_buffer.buffer().read(cx); + buffer_versions.push(proto::ChannelBufferVersion { + channel_id: channel_buffer.channel_id.0, + epoch: channel_buffer.epoch(), + version: language::proto::serialize_version(&inner_buffer.version()), + }); }); } } @@ -874,7 +880,26 @@ impl ChannelStore { }); cx.spawn(async move |this, cx| { - let mut response = response.await?; + let response = match response.await { + Ok(response) => response, + Err(err) => { + // Clear rejoining flag on all buffers since the rejoin failed + this.update(cx, |this, cx| { + for buffer in this.opened_buffers.values() { + if let OpenEntityHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, |channel_buffer, _| { + channel_buffer.set_rejoining(false); + }); + } + } + } + }) + .ok(); + return Err(err); + } + }; + let mut response = response; this.update(cx, |this, cx| { this.opened_buffers.retain(|_, buffer| match buffer { @@ -948,6 +973,22 @@ impl ChannelStore { fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut Context) { cx.notify(); self.did_subscribe = false; + + // If we're waiting for reconnect, set rejoining=true on all buffers immediately. + // This prevents operations from being sent during the reconnection window, + // before handle_connect has a chance to run and capture the version. 
+ if wait_for_reconnect { + for buffer in self.opened_buffers.values() { + if let OpenEntityHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, |channel_buffer, _| { + channel_buffer.set_rejoining(true); + }); + } + } + } + } + self.disconnect_channel_buffers_task.get_or_insert_with(|| { cx.spawn(async move |this, cx| { if wait_for_reconnect { diff --git a/crates/client/src/zed_urls.rs b/crates/client/src/zed_urls.rs index 2d0eedc9179a0ba2b640a8ed28366e47c0f0c397..e8d5a1e3bbade8ef357043cc993d33c03f05a9da 100644 --- a/crates/client/src/zed_urls.rs +++ b/crates/client/src/zed_urls.rs @@ -44,22 +44,6 @@ pub fn ai_privacy_and_security(cx: &App) -> String { ) } -/// Returns the URL to Zed AI's external agents documentation. -pub fn external_agents_docs(cx: &App) -> String { - format!( - "{server_url}/docs/ai/external-agents", - server_url = server_url(cx) - ) -} - -/// Returns the URL to Zed agent servers documentation. -pub fn agent_server_docs(cx: &App) -> String { - format!( - "{server_url}/docs/extensions/agent-servers", - server_url = server_url(cx) - ) -} - /// Returns the URL to Zed's edit prediction documentation. pub fn edit_prediction_docs(cx: &App) -> String { format!( diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index cb0808abcae1709020f3fd3077436aeb1140a140..57612c5ff70ad7088dc4ff4bc348377b78184bae 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -61,8 +61,8 @@ pub type Seq = u32; /// used to determine the ordering of events in the editor. #[derive(Clone, Copy, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct Lamport { - pub replica_id: ReplicaId, pub value: Seq, + pub replica_id: ReplicaId, } /// A [version vector](https://en.wikipedia.org/wiki/Version_vector). 
diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index f485e2d20c619715ea342fccd2a5cec0ecaa6f4e..13d67838b216f4990f15ec22c1701aa7aef9dbf2 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -9,7 +9,9 @@ use futures::AsyncReadExt as _; use gpui::{App, Task}; use gpui_tokio::Tokio; use http_client::http::request; -use http_client::{AsyncBody, HttpClientWithUrl, HttpRequestExt, Method, Request, StatusCode}; +use http_client::{ + AsyncBody, HttpClientWithUrl, HttpRequestExt, Json, Method, Request, StatusCode, +}; use parking_lot::RwLock; use thiserror::Error; use yawc::WebSocket; @@ -141,6 +143,7 @@ impl CloudApiClient { pub async fn create_llm_token( &self, system_id: Option, + organization_id: Option, ) -> Result { let request_builder = Request::builder() .method(Method::POST) @@ -153,7 +156,10 @@ impl CloudApiClient { builder.header(ZED_SYSTEM_ID_HEADER_NAME, system_id) }); - let request = self.build_request(request_builder, AsyncBody::default())?; + let request = self.build_request( + request_builder, + Json(CreateLlmTokenBody { organization_id }), + )?; let mut response = self.http_client.send(request).await?; diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index 5f86dce21eea6f76a426fa1bca735be87a513ee2..42d3442bfc016f5cb1a39ba421ccdfe386bcbc65 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -52,6 +52,12 @@ pub struct AcceptTermsOfServiceResponse { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct LlmToken(pub String); +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] +pub struct CreateLlmTokenBody { + #[serde(default)] + pub organization_id: Option, +} + #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct CreateLlmTokenResponse { pub token: LlmToken, @@ -62,6 
+68,7 @@ pub struct SubmitAgentThreadFeedbackBody { pub organization_id: Option, pub agent: String, pub session_id: String, + pub parent_session_id: Option, pub rating: String, pub thread: serde_json::Value, } diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index f69c279b24718b76fe2ae10f066d57324fe03461..9ed82365ea910dd910226f70e242d68388b41796 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -142,6 +142,8 @@ pub struct PredictEditsResponse { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AcceptEditPredictionBody { pub request_id: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Debug, Clone, Deserialize)] @@ -160,6 +162,8 @@ pub struct EditPredictionRejection { #[serde(default)] pub reason: EditPredictionRejectReason, pub was_shown: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq)] @@ -302,6 +306,8 @@ pub struct LanguageModel { pub supports_tools: bool, pub supports_images: bool, pub supports_thinking: bool, + #[serde(default)] + pub supports_fast_mode: bool, pub supported_effort_levels: Vec, #[serde(default)] pub supports_streaming_tools: bool, diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 9e7772ab7450cb47785d034b39d9c7c642b931c2..5002c1a770ec1955d2a96c97098867f20f9bd05d 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -1,6 +1,7 @@ use crate::PredictEditsRequestTrigger; use serde::{Deserialize, Serialize}; use std::borrow::Cow; +use std::ops::Range; #[derive(Debug, Deserialize, Serialize)] pub struct RawCompletionRequest { @@ -27,6 +28,13 @@ pub struct PredictEditsV3Request { pub struct 
PredictEditsV3Response { pub request_id: String, pub output: String, + /// The editable region byte range within `cursor_excerpt` that the + /// server used for this request. When present, the client should use + /// this range to extract the old text from its local excerpt for + /// diffing, rather than relying on its own format-derived range. + pub editable_range: Range, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Debug, Deserialize, Serialize)] diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 71e39fb595656e0dcdc53d97705b87a216ceb0f3..3e4b5c2ce211f68ef7e12895b542db5e6e3ea47c 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -122,6 +122,8 @@ CREATE TABLE "project_repository_statuses" ( "status_kind" INT4 NOT NULL, "first_status" INT4 NULL, "second_status" INT4 NULL, + "lines_added" INT4 NULL, + "lines_deleted" INT4 NULL, "scan_id" INT8 NOT NULL, "is_deleted" BOOL NOT NULL, PRIMARY KEY (project_id, repository_id, repo_path) diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 493be3823e25a433d4a6a27a21c508f218dc68d1..0f4e4f2d2e3925ea1e4d2b964c5e4f159f393b4f 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -315,6 +315,8 @@ CREATE TABLE public.project_repository_statuses ( status_kind integer NOT NULL, first_status integer, second_status integer, + lines_added integer, + lines_deleted integer, scan_id bigint NOT NULL, is_deleted boolean NOT NULL ); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 57fb0df86495dc2013e7cd780c2e62e57298bd11..d8803c253f5feef8ef5e040f3ea112abcc688f52 100644 --- a/crates/collab/src/db.rs +++ 
b/crates/collab/src/db.rs @@ -732,6 +732,8 @@ fn db_status_to_proto( status: Some(proto::GitFileStatus { variant: Some(variant), }), + diff_stat_added: entry.lines_added.map(|v| v as u32), + diff_stat_deleted: entry.lines_deleted.map(|v| v as u32), }) } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index ed6325c62173358c8deac2dcd6289ce0b8ae5e71..24cf639a715aa9b88da80375b389debaea0c4295 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -334,147 +334,6 @@ impl Database { .await?; } - // Backward-compatibility for old Zed clients. - // - // Remove this block when Zed 1.80 stable has been out for a week. - { - if !update.updated_repositories.is_empty() { - project_repository::Entity::insert_many( - update.updated_repositories.iter().map(|repository| { - project_repository::ActiveModel { - project_id: ActiveValue::set(project_id), - legacy_worktree_id: ActiveValue::set(Some(worktree_id)), - id: ActiveValue::set(repository.repository_id as i64), - scan_id: ActiveValue::set(update.scan_id as i64), - is_deleted: ActiveValue::set(false), - branch_summary: ActiveValue::Set( - repository - .branch_summary - .as_ref() - .map(|summary| serde_json::to_string(summary).unwrap()), - ), - current_merge_conflicts: ActiveValue::Set(Some( - serde_json::to_string(&repository.current_merge_conflicts) - .unwrap(), - )), - // Old clients do not use abs path, entry ids, head_commit_details, or merge_message. 
- abs_path: ActiveValue::set(String::new()), - entry_ids: ActiveValue::set("[]".into()), - head_commit_details: ActiveValue::set(None), - merge_message: ActiveValue::set(None), - remote_upstream_url: ActiveValue::set(None), - remote_origin_url: ActiveValue::set(None), - } - }), - ) - .on_conflict( - OnConflict::columns([ - project_repository::Column::ProjectId, - project_repository::Column::Id, - ]) - .update_columns([ - project_repository::Column::ScanId, - project_repository::Column::BranchSummary, - project_repository::Column::CurrentMergeConflicts, - ]) - .to_owned(), - ) - .exec(&*tx) - .await?; - - let has_any_statuses = update - .updated_repositories - .iter() - .any(|repository| !repository.updated_statuses.is_empty()); - - if has_any_statuses { - project_repository_statuses::Entity::insert_many( - update.updated_repositories.iter().flat_map( - |repository: &proto::RepositoryEntry| { - repository.updated_statuses.iter().map(|status_entry| { - let (repo_path, status_kind, first_status, second_status) = - proto_status_to_db(status_entry.clone()); - project_repository_statuses::ActiveModel { - project_id: ActiveValue::set(project_id), - repository_id: ActiveValue::set( - repository.repository_id as i64, - ), - scan_id: ActiveValue::set(update.scan_id as i64), - is_deleted: ActiveValue::set(false), - repo_path: ActiveValue::set(repo_path), - status: ActiveValue::set(0), - status_kind: ActiveValue::set(status_kind), - first_status: ActiveValue::set(first_status), - second_status: ActiveValue::set(second_status), - } - }) - }, - ), - ) - .on_conflict( - OnConflict::columns([ - project_repository_statuses::Column::ProjectId, - project_repository_statuses::Column::RepositoryId, - project_repository_statuses::Column::RepoPath, - ]) - .update_columns([ - project_repository_statuses::Column::ScanId, - project_repository_statuses::Column::StatusKind, - project_repository_statuses::Column::FirstStatus, - project_repository_statuses::Column::SecondStatus, - ]) - 
.to_owned(), - ) - .exec(&*tx) - .await?; - } - - for repo in &update.updated_repositories { - if !repo.removed_statuses.is_empty() { - project_repository_statuses::Entity::update_many() - .filter( - project_repository_statuses::Column::ProjectId - .eq(project_id) - .and( - project_repository_statuses::Column::RepositoryId - .eq(repo.repository_id), - ) - .and( - project_repository_statuses::Column::RepoPath - .is_in(repo.removed_statuses.iter()), - ), - ) - .set(project_repository_statuses::ActiveModel { - is_deleted: ActiveValue::Set(true), - scan_id: ActiveValue::Set(update.scan_id as i64), - ..Default::default() - }) - .exec(&*tx) - .await?; - } - } - } - - if !update.removed_repositories.is_empty() { - project_repository::Entity::update_many() - .filter( - project_repository::Column::ProjectId - .eq(project_id) - .and(project_repository::Column::LegacyWorktreeId.eq(worktree_id)) - .and(project_repository::Column::Id.is_in( - update.removed_repositories.iter().map(|id| *id as i64), - )), - ) - .set(project_repository::ActiveModel { - is_deleted: ActiveValue::Set(true), - scan_id: ActiveValue::Set(update.scan_id as i64), - ..Default::default() - }) - .exec(&*tx) - .await?; - } - } - let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; Ok(connection_ids) }) @@ -552,6 +411,12 @@ impl Database { status_kind: ActiveValue::set(status_kind), first_status: ActiveValue::set(first_status), second_status: ActiveValue::set(second_status), + lines_added: ActiveValue::set( + status_entry.diff_stat_added.map(|v| v as i32), + ), + lines_deleted: ActiveValue::set( + status_entry.diff_stat_deleted.map(|v| v as i32), + ), } }), ) @@ -566,6 +431,8 @@ impl Database { project_repository_statuses::Column::StatusKind, project_repository_statuses::Column::FirstStatus, project_repository_statuses::Column::SecondStatus, + project_repository_statuses::Column::LinesAdded, + project_repository_statuses::Column::LinesDeleted, ]) .to_owned(), ) @@ -1002,7 +869,7 @@ 
impl Database { repositories.push(proto::UpdateRepository { project_id: db_repository_entry.project_id.0 as u64, id: db_repository_entry.id as u64, - abs_path: db_repository_entry.abs_path, + abs_path: db_repository_entry.abs_path.clone(), entry_ids, updated_statuses, removed_statuses: Vec::new(), @@ -1015,6 +882,7 @@ impl Database { stash_entries: Vec::new(), remote_upstream_url: db_repository_entry.remote_upstream_url.clone(), remote_origin_url: db_repository_entry.remote_origin_url.clone(), + original_repo_abs_path: Some(db_repository_entry.abs_path), }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index d8fca0306f5b2ae5668a735db578061275192b58..b4cbd83167b227542d8de1022b7e2cf49f5a7645 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -738,7 +738,7 @@ impl Database { while let Some(db_status) = db_statuses.next().await { let db_status: project_repository_statuses::Model = db_status?; if db_status.is_deleted { - removed_statuses.push(db_status.repo_path); + removed_statuses.push(db_status.repo_path.clone()); } else { updated_statuses.push(db_status_to_proto(db_status)?); } @@ -791,13 +791,14 @@ impl Database { head_commit_details, project_id: project_id.to_proto(), id: db_repository.id as u64, - abs_path: db_repository.abs_path, + abs_path: db_repository.abs_path.clone(), scan_id: db_repository.scan_id as u64, is_last_update: true, merge_message: db_repository.merge_message, stash_entries: Vec::new(), remote_upstream_url: db_repository.remote_upstream_url.clone(), remote_origin_url: db_repository.remote_origin_url.clone(), + original_repo_abs_path: Some(db_repository.abs_path), }); } } diff --git a/crates/collab/src/db/tables/project_repository_statuses.rs b/crates/collab/src/db/tables/project_repository_statuses.rs index 7bb903d45085467a3285a58f8afdd7a29339731a..8160d8a03c2a3b4dd0db7675489eeafcef020a9a 100644 --- 
a/crates/collab/src/db/tables/project_repository_statuses.rs +++ b/crates/collab/src/db/tables/project_repository_statuses.rs @@ -17,6 +17,8 @@ pub struct Model { pub first_status: Option, /// For unmerged entries, this is the `second_head` status. For tracked entries, this is the `worktree_status`. pub second_status: Option, + pub lines_added: Option, + pub lines_deleted: Option, pub scan_id: i64, pub is_deleted: bool, } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 087dbe2a0ba23851689e75401c62b64775cf2282..b521f6b083ae311d98ec46c900ce821fd8042e4a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -437,6 +437,8 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(update_context) diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs index c9fd0459f43ea74ca1052831903e913c191a6f7a..a5aca7dd82ca23b1c348bea1fff5d2da2870c654 100644 --- a/crates/collab/tests/integration/channel_buffer_tests.rs +++ b/crates/collab/tests/integration/channel_buffer_tests.rs @@ -3,6 +3,7 @@ use call::ActiveCall; use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL; use client::{Collaborator, ParticipantIndex, UserId}; use collab::rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}; + use collab_ui::channel_view::ChannelView; use collections::HashMap; use editor::{Anchor, Editor, MultiBufferOffset, ToOffset}; @@ -698,6 +699,165 @@ async fn test_channel_buffer_changes_persist( }); } +#[gpui::test] +async fn test_channel_buffer_operations_lost_on_reconnect( + executor: BackgroundExecutor, + cx_a: &mut 
TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + // Both clients open the channel buffer. + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + // Step 1: Client A makes an initial edit that syncs to B. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "a")], None, cx); + }) + }); + executor.run_until_parked(); + + // Verify both clients see "a". + channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + + // Step 2: Disconnect client A. Do NOT advance past RECONNECT_TIMEOUT + // so that the buffer stays in `opened_buffers` for rejoin. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.run_until_parked(); + + // Step 3: While disconnected, client A makes an offline edit ("b"). + // on_buffer_update fires but client.send() fails because transport is down. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(1..1, "b")], None, cx); + }) + }); + executor.run_until_parked(); + + // Client A sees "ab" locally; B still sees "a". 
+ channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "ab"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + + // Step 4: Reconnect and make a racing edit in parallel. + // + // The race condition occurs when: + // 1. Transport reconnects, handle_connect captures version V (with "b") and sends RejoinChannelBuffers + // 2. DURING the async gap (awaiting response), user makes edit "c" + // 3. on_buffer_update sends UpdateChannelBuffer (succeeds because transport is up) + // 4. Server receives BOTH messages concurrently (FuturesUnordered) + // 5. If UpdateChannelBuffer commits first, server version is inflated to include "c" + // 6. RejoinChannelBuffers reads inflated version and sends it back + // 7. Client's serialize_ops(inflated_version) filters out "b" (offline edit) + // because the inflated version's timestamp covers "b"'s timestamp + + // Get the buffer handle for spawning + let buffer_for_edit = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer()); + + // Spawn the edit task - it will wait for executor to run it + let edit_task = cx_a.spawn({ + let buffer = buffer_for_edit; + async move |mut cx| { + let _ = buffer.update(&mut cx, |buffer, cx| { + buffer.edit([(2..2, "c")], None, cx); + }); + } + }); + + // Allow connections so reconnect can succeed + server.allow_connections(); + + // Advance clock to trigger reconnection attempt + executor.advance_clock(RECEIVE_TIMEOUT); + + // Run the edit task - this races with handle_connect + edit_task.detach(); + + // Let everything settle. + executor.run_until_parked(); + + // Step 7: Read final buffer text from both clients. + let text_a = channel_buffer_a.read_with(cx_a, |buffer, cx| buffer.buffer().read(cx).text()); + let text_b = channel_buffer_b.read_with(cx_b, |buffer, cx| buffer.buffer().read(cx).text()); + + // Both clients must see the same text containing all three edits. 
+ assert_eq!( + text_a, text_b, + "Client A and B diverged! A sees {:?}, B sees {:?}. \ + Operations were lost during reconnection.", + text_a, text_b + ); + assert!( + text_a.contains('a'), + "Initial edit 'a' missing from final text {:?}", + text_a + ); + assert!( + text_a.contains('b'), + "Offline edit 'b' missing from final text {:?}. \ + This is the reconnection race bug: the offline operation was \ + filtered out by serialize_ops because the server_version was \ + inflated by a racing UpdateChannelBuffer.", + text_a + ); + assert!( + text_a.contains('c'), + "Racing edit 'c' missing from final text {:?}", + text_a + ); + + // Step 8: Verify the invariant directly — every operation known to + // client A must be observed by client B's version. If any operation + // in A's history is not covered by B's version, it was lost. + channel_buffer_a.read_with(cx_a, |buf_a, cx_a_inner| { + let buffer_a = buf_a.buffer().read(cx_a_inner); + let ops_a = buffer_a.operations(); + channel_buffer_b.read_with(cx_b, |buf_b, cx_b_inner| { + let buffer_b = buf_b.buffer().read(cx_b_inner); + let version_b = buffer_b.version(); + for (lamport, _op) in ops_a.iter() { + assert!( + version_b.observed(*lamport), + "Operation with lamport timestamp {:?} from client A \ + is NOT observed by client B's version. 
This operation \ + was lost during reconnection.", + lamport + ); + } + }); + }); +} + #[track_caller] fn assert_collaborators(collaborators: &HashMap, ids: &[Option]) { let mut user_ids = collaborators diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs index b761bef9ec3be679d55d1c82e3cb5cce0ac7f14e..c4031788c87f747c3125f4dbc509d68ea3720b43 100644 --- a/crates/collab/tests/integration/following_tests.rs +++ b/crates/collab/tests/integration/following_tests.rs @@ -8,8 +8,8 @@ use collab_ui::{ }; use editor::{Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects}; use gpui::{ - AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, TestAppContext, - VisualContext, VisualTestContext, point, + Action, AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, + TestAppContext, VisualContext, VisualTestContext, point, }; use language::Capability; use rpc::proto::PeerId; @@ -18,7 +18,7 @@ use settings::SettingsStore; use text::{Point, ToPoint}; use util::{path, rel_path::rel_path, test::sample_text}; use workspace::{ - CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace, + CloseWindow, CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace, item::ItemHandle as _, }; @@ -259,8 +259,8 @@ async fn test_basic_following( // Client C closes the project. let weak_workspace_c = workspace_c.downgrade(); - workspace_c.update_in(cx_c, |workspace, window, cx| { - workspace.close_window(&Default::default(), window, cx); + workspace_c.update_in(cx_c, |_, window, cx| { + window.dispatch_action(Box::new(CloseWindow) as Box, cx); }); executor.run_until_parked(); // are you sure you want to leave the call? 
diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index f3abb5bc3f3e1a12e7ecb56c985f2cff46582cee..dccc99a07769e66a3eb318a8201d8e14a29ef4f2 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,17 +1,40 @@ -use std::path::Path; +use std::path::{Path, PathBuf}; use call::ActiveCall; -use git::status::{FileStatus, StatusCode, TrackedStatus}; -use git_ui::project_diff::ProjectDiff; -use gpui::{AppContext as _, TestAppContext, VisualTestContext}; +use collections::HashMap; +use git::{ + repository::RepoPath, + status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, +}; +use git_ui::{git_panel::GitPanel, project_diff::ProjectDiff}; +use gpui::{AppContext as _, BackgroundExecutor, TestAppContext, VisualTestContext}; use project::ProjectPath; use serde_json::json; + use util::{path, rel_path::rel_path}; use workspace::{MultiWorkspace, Workspace}; -// use crate::TestServer; +fn collect_diff_stats( + panel: &gpui::Entity, + cx: &C, +) -> HashMap { + panel.read_with(cx, |panel, cx| { + let Some(repo) = panel.active_repository() else { + return HashMap::default(); + }; + let snapshot = repo.read(cx).snapshot(); + let mut stats = HashMap::default(); + for entry in snapshot.statuses_by_path.iter() { + if let Some(diff_stat) = entry.diff_stat { + stats.insert(entry.repo_path.clone(), diff_stat); + } + } + stats + }) +} + #[gpui::test] async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.background_executor.clone()).await; @@ -141,3 +164,337 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) ); }); } + +#[gpui::test] +async fn test_remote_git_worktrees( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, 
"user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap()); + + // Initially only the main worktree (the repo itself) should be present + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/project"))); + + // Client B creates a git worktree via the remote project + let worktree_directory = PathBuf::from(path!("/project")); + cx_b.update(|cx| { + repo_b.update(cx, |repository, _| { + repository.create_worktree( + "feature-branch".to_string(), + worktree_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + .await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + // Client B lists worktrees — should see main + the one just created + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/project"))); + assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch")); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + // Verify from the host side that the worktree was actually 
created + let host_worktrees = { + let repo_a = cx_a.update(|cx| { + project_a + .read(cx) + .repositories(cx) + .values() + .next() + .unwrap() + .clone() + }); + cx_a.update(|cx| repo_a.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap() + }; + assert_eq!(host_worktrees.len(), 2); + assert_eq!(host_worktrees[0].path, PathBuf::from(path!("/project"))); + assert_eq!( + host_worktrees[1].path, + worktree_directory.join("feature-branch") + ); + + // Client B creates a second git worktree without an explicit commit + cx_b.update(|cx| { + repo_b.update(cx, |repository, _| { + repository.create_worktree( + "bugfix-branch".to_string(), + worktree_directory.clone(), + None, + ) + }) + }) + .await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + // Client B lists worktrees — should now have main + two created + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 3); + + let feature_worktree = worktrees + .iter() + .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch") + .expect("should find feature-branch worktree"); + assert_eq!( + feature_worktree.path, + worktree_directory.join("feature-branch") + ); + + let bugfix_worktree = worktrees + .iter() + .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch") + .expect("should find bugfix-branch worktree"); + assert_eq!( + bugfix_worktree.path, + worktree_directory.join("bugfix-branch") + ); + assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha"); +} + +#[gpui::test] +async fn test_diff_stat_sync_between_host_and_downstream_client( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(cx_a.background_executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = 
server.create_client(cx_c, "user_c").await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let fs = client_a.fs(); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "src": { + "lib.rs": "line1\nline2\nline3\n", + "new_file.rs": "added1\nadded2\n", + }, + "README.md": "# project 1", + } + }), + ) + .await; + + let dot_git = Path::new(path!("/code/project1/.git")); + fs.set_head_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\n".into()), + ("src/deleted.rs", "was_here\n".into()), + ], + "deadbeef", + ); + fs.set_index_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\nline3\nline4\n".into()), + ("src/staged_only.rs", "x\ny\n".into()), + ("src/new_file.rs", "added1\nadded2\n".into()), + ("README.md", "# project 1".into()), + ], + ); + + let (project_a, worktree_id) = client_a + .build_local_project(path!("/code/project1"), cx_a) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + let _project_c = client_c.join_remote_project(project_id, cx_c).await; + cx_a.run_until_parked(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let panel_a = workspace_a.update_in(cx_a, GitPanel::new_test); + workspace_a.update_in(cx_a, |workspace, window, cx| { + workspace.add_panel(panel_a.clone(), window, cx); + }); + + let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test); + workspace_b.update_in(cx_b, |workspace, window, cx| { + workspace.add_panel(panel_b.clone(), window, cx); + }); + + cx_a.run_until_parked(); + + let stats_a = collect_diff_stats(&panel_a, cx_a); + let stats_b = collect_diff_stats(&panel_b, cx_b); + + let mut expected: HashMap = 
HashMap::default(); + expected.insert( + RepoPath::new("src/lib.rs").unwrap(), + DiffStat { + added: 3, + deleted: 2, + }, + ); + expected.insert( + RepoPath::new("src/deleted.rs").unwrap(), + DiffStat { + added: 0, + deleted: 1, + }, + ); + expected.insert( + RepoPath::new("src/new_file.rs").unwrap(), + DiffStat { + added: 2, + deleted: 0, + }, + ); + expected.insert( + RepoPath::new("README.md").unwrap(), + DiffStat { + added: 1, + deleted: 0, + }, + ); + assert_eq!(stats_a, expected, "host diff stats should match expected"); + assert_eq!(stats_a, stats_b, "host and remote should agree"); + + let buffer_a = project_a + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx) + }) + .await + .unwrap(); + + let _buffer_b = project_b + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + buffer_a.update(cx_a, |buf, cx| { + buf.edit([(buf.len()..buf.len(), "line4\n")], None, cx); + }); + project_a + .update(cx_a, |project, cx| { + project.save_buffer(buffer_a.clone(), cx) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + let stats_a = collect_diff_stats(&panel_a, cx_a); + let stats_b = collect_diff_stats(&panel_b, cx_b); + + let mut expected_after_edit = expected.clone(); + expected_after_edit.insert( + RepoPath::new("src/lib.rs").unwrap(), + DiffStat { + added: 4, + deleted: 2, + }, + ); + assert_eq!( + stats_a, expected_after_edit, + "host diff stats should reflect the edit" + ); + assert_eq!( + stats_b, expected_after_edit, + "remote diff stats should reflect the host's edit" + ); + + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + let user_id_b = client_b.current_user_id(cx_b).to_proto(); + active_call_a + .update(cx_a, |call, cx| call.invite(user_id_b, None, cx)) + .await + .unwrap(); + cx_b.run_until_parked(); + let 
active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + let project_b = client_b.join_remote_project(project_id, cx_b).await; + cx_a.run_until_parked(); + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test); + workspace_b.update_in(cx_b, |workspace, window, cx| { + workspace.add_panel(panel_b.clone(), window, cx); + }); + cx_b.run_until_parked(); + + let stats_b = collect_diff_stats(&panel_b, cx_b); + assert_eq!( + stats_b, expected_after_edit, + "remote diff stats should be restored from the database after rejoining the call" + ); +} diff --git a/crates/collab/tests/integration/integration_tests.rs b/crates/collab/tests/integration/integration_tests.rs index c26f20c1e294326f275dbfda1d2d41603719cd3e..3bad9c82c26392a935f67efc578b5d293b2cab3d 100644 --- a/crates/collab/tests/integration/integration_tests.rs +++ b/crates/collab/tests/integration/integration_tests.rs @@ -7205,3 +7205,89 @@ async fn test_remote_git_branches( assert_eq!(host_branch.name(), "totally-new-branch"); } + +#[gpui::test] +async fn test_guest_can_rejoin_shared_project_after_leaving_call( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ + "file.txt": "hello\n", + }), + ) + .await; + + let (project_a, _worktree_id) = client_a.build_local_project(path!("/project"), cx_a).await; + let active_call_a = 
cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let _project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // third client joins call to prevent room from being torn down + let _project_c = client_c.join_remote_project(project_id, cx_c).await; + executor.run_until_parked(); + + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let user_id_b = client_b.current_user_id(cx_b).to_proto(); + let active_call_a = cx_a.read(ActiveCall::global); + active_call_a + .update(cx_a, |call, cx| call.invite(user_id_b, None, cx)) + .await + .unwrap(); + executor.run_until_parked(); + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let _project_b2 = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + let guest_count = project + .collaborators() + .values() + .filter(|c| !c.is_host) + .count(); + + assert_eq!( + guest_count, 2, + "host should have exactly one guest collaborator after rejoin" + ); + }); + + _project_b.read_with(cx_b, |project, _| { + assert_eq!( + project.client_subscriptions().len(), + 0, + "We should clear all host subscriptions after leaving the project" + ); + }) +} diff --git a/crates/collab/tests/integration/randomized_test_helpers.rs b/crates/collab/tests/integration/randomized_test_helpers.rs index e3e4a122d1df069385ef850aeccaa4c5788d253d..a6772019768ba19e2a92843a1e33b256f0eb8b0c 100644 --- a/crates/collab/tests/integration/randomized_test_helpers.rs +++ b/crates/collab/tests/integration/randomized_test_helpers.rs @@ -180,6 +180,13 @@ pub async fn run_randomized_test( 
T::on_quiesce(&mut server, &mut clients).await; for (client, cx) in clients { + cx.update(|cx| { + for window in cx.windows() { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + }); cx.update(|cx| { let settings = cx.remove_global::(); cx.clear_globals(); @@ -187,8 +194,8 @@ pub async fn run_randomized_test( theme::init(theme::LoadThemes::JustBase, cx); drop(client); }); + executor.run_until_parked(); } - executor.run_until_parked(); if let Some(path) = plan_save_path() { eprintln!("saved test plan to path {:?}", path); @@ -556,6 +563,13 @@ impl TestPlan { log::info!("{} removed", client.username); plan.lock().user(removed_user_id).online = false; + client_cx.update(|cx| { + for window in cx.windows() { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + }); client_cx.update(|cx| { cx.clear_globals(); drop(client); diff --git a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs index 4556c740ec74f6fb1bc8a2c760812376dae6b4a8..6825c468e783ee8d3a2a6107a031accfc108abd0 100644 --- a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs +++ b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs @@ -33,7 +33,7 @@ use settings::{ SettingsStore, }; use std::{ - path::Path, + path::{Path, PathBuf}, sync::{ Arc, atomic::{AtomicUsize, Ordering}, @@ -396,6 +396,130 @@ async fn test_ssh_collaboration_git_branches( }); } +#[gpui::test] +async fn test_ssh_collaboration_git_worktrees( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + cx_a.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + }); + server_cx.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + }); + + let mut server = 
TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let (opts, server_ssh, _) = RemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree("/project", json!({ ".git": {}, "file.txt": "content" })) + .await; + + server_cx.update(HeadlessProject::init); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let headless_project = server_cx.new(|cx| { + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: Arc::new(BlockedHttpClient), + node_runtime: NodeRuntime::unavailable(), + languages, + extension_host_proxy: Arc::new(ExtensionHostProxy::new()), + startup_time: std::time::Instant::now(), + }, + false, + cx, + ) + }); + + let client_ssh = RemoteClient::connect_mock(opts, cx_a).await; + let (project_a, _) = client_a + .build_ssh_project("/project", client_ssh, false, cx_a) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap()); + + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repo, _| repo.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + + let worktree_directory = PathBuf::from("/project"); + cx_b.update(|cx| { + repo_b.update(cx, |repo, _| { + repo.create_worktree( + "feature-branch".to_string(), + worktree_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + .await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + let worktrees = cx_b + .update(|cx| 
repo_b.update(cx, |repo, _| repo.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch")); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + let server_worktrees = { + let server_repo = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .git_store + .read(cx) + .repositories() + .values() + .next() + .unwrap() + .clone() + }) + }); + server_cx + .update(|cx| server_repo.update(cx, |repo, _| repo.worktrees())) + .await + .unwrap() + .unwrap() + }; + assert_eq!(server_worktrees.len(), 2); + assert_eq!( + server_worktrees[1].path, + worktree_directory.join("feature-branch") + ); +} + #[gpui::test] async fn test_ssh_collaboration_formatting_with_prettier( executor: BackgroundExecutor, diff --git a/crates/collab_ui/src/notifications/incoming_call_notification.rs b/crates/collab_ui/src/notifications/incoming_call_notification.rs index aabb477072c97f829ab64971488ab66d2f6a79e4..164b91395a8853c330e2f7842b5676fff0916e63 100644 --- a/crates/collab_ui/src/notifications/incoming_call_notification.rs +++ b/crates/collab_ui/src/notifications/incoming_call_notification.rs @@ -42,6 +42,14 @@ pub fn init(app_state: &Arc, cx: &mut App) { } } } + + for window in notification_windows.drain(..) 
{ + window + .update(cx, |_, window, _| { + window.remove_window(); + }) + .log_err(); + } }) .detach(); } diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index a6fc0193a4b18407c2f4473a0fbea471d91eb9a9..d13360a7c5403d997cfb2363f33cfe3b257dcef1 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -510,7 +510,7 @@ impl PickerDelegate for CommandPaletteDelegate { .delegate .matches_updated(query, commands, matches, intercept_result, cx) }) - .log_err(); + .ok(); }) } @@ -543,7 +543,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { self.command_palette .update(cx, |_, cx| cx.emit(DismissEvent)) - .log_err(); + .ok(); } fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context>) { diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index a1bbc26b9c44d0f68e120a10bf11d0f3cae19d73..179e217d207554bcf226ce905aa9226c1c334b72 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1035,10 +1035,9 @@ impl Copilot { }; let buffer_entity = buffer.clone(); let lsp = server.lsp.clone(); - let registered_buffer = server - .registered_buffers - .get_mut(&buffer.entity_id()) - .unwrap(); + let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) else { + return Task::ready(Err(anyhow::anyhow!("buffer not registered"))); + }; let pending_snapshot = registered_buffer.report_changes(buffer, cx); let buffer = buffer.read(cx); let uri = registered_buffer.uri.clone(); diff --git a/crates/copilot/src/copilot_edit_prediction_delegate.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs index 6bb36b616f782e8109255b4bca9a643060399962..cfb5eef7e08caab8fec624a1cf364eecac16ec9b 100644 --- a/crates/copilot/src/copilot_edit_prediction_delegate.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ 
-233,8 +233,8 @@ mod tests { use super::*; use edit_prediction_types::EditPredictionGranularity; use editor::{ - Editor, ExcerptRange, MultiBuffer, MultiBufferOffset, SelectionEffects, - test::editor_lsp_test_context::EditorLspTestContext, + Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects, + test::{editor_content_with_blocks, editor_lsp_test_context::EditorLspTestContext}, }; use fs::FakeFs; use futures::StreamExt; @@ -685,32 +685,32 @@ mod tests { let buffer_2 = cx.new(|cx| Buffer::local("c = 3\nd = 4\n", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); multibuffer }); - let editor = - cx.add_window(|window, cx| Editor::for_multibuffer(multibuffer, None, window, cx)); - editor - .update(cx, |editor, window, cx| { - use gpui::Focusable; - window.focus(&editor.focus_handle(cx), cx); - }) - .unwrap(); + let (editor, cx) = + cx.add_window_view(|window, cx| Editor::for_multibuffer(multibuffer, None, window, cx)); + editor.update_in(cx, |editor, window, cx| { + use gpui::Focusable; + window.focus(&editor.focus_handle(cx), cx); + }); let copilot_provider = cx.new(|_| CopilotEditPredictionDelegate::new(copilot)); - editor - .update(cx, |editor, window, cx| { - editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) - }) - .unwrap(); + editor.update_in(cx, |editor, window, cx| { + editor.set_edit_prediction_provider(Some(copilot_provider), window, cx) + }); handle_copilot_completion_request( &copilot_lsp, @@ -724,7 +724,7 @@ mod tests { }, }], ); - _ = 
editor.update(cx, |editor, window, cx| { + _ = editor.update_in(cx, |editor, window, cx| { // Ensure copilot suggestions are shown for the first excerpt. editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([Point::new(1, 5)..Point::new(1, 5)]) @@ -732,14 +732,22 @@ mod tests { editor.show_edit_prediction(&Default::default(), window, cx); }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - _ = editor.update(cx, |editor, _, cx| { + _ = editor.update_in(cx, |editor, _, _| { assert!(editor.has_active_edit_prediction()); - assert_eq!( - editor.display_text(cx), - "\n\na = 1\nb = 2 + a\n\n\n\nc = 3\nd = 4\n" - ); - assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n"); }); + pretty_assertions::assert_eq!( + editor_content_with_blocks(&editor, cx), + indoc! { " + § + § ----- + a = 1 + b = 2 + a + § + § ----- + c = 3 + d = 4" + } + ); handle_copilot_completion_request( &copilot_lsp, @@ -753,38 +761,61 @@ mod tests { }, }], ); - _ = editor.update(cx, |editor, window, cx| { + _ = editor.update_in(cx, |editor, window, cx| { // Move to another excerpt, ensuring the suggestion gets cleared. editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([Point::new(4, 5)..Point::new(4, 5)]) }); assert!(!editor.has_active_edit_prediction()); - assert_eq!( - editor.display_text(cx), - "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4\n" - ); - assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4\n"); - + }); + pretty_assertions::assert_eq!( + editor_content_with_blocks(&editor, cx), + indoc! { " + § + § ----- + a = 1 + b = 2 + § + § ----- + c = 3 + d = 4"} + ); + editor.update_in(cx, |editor, window, cx| { // Type a character, ensuring we don't even try to interpolate the previous suggestion. 
editor.handle_input(" ", window, cx); assert!(!editor.has_active_edit_prediction()); - assert_eq!( - editor.display_text(cx), - "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 \n" - ); - assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n"); }); + pretty_assertions::assert_eq!( + editor_content_with_blocks(&editor, cx), + indoc! {" + § + § ----- + a = 1 + b = 2 + § + § ----- + c = 3 + d = 4\x20" + }, + ); // Ensure the new suggestion is displayed when the debounce timeout expires. executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - _ = editor.update(cx, |editor, _, cx| { + _ = editor.update(cx, |editor, _| { assert!(editor.has_active_edit_prediction()); - assert_eq!( - editor.display_text(cx), - "\n\na = 1\nb = 2\n\n\n\nc = 3\nd = 4 + c\n" - ); - assert_eq!(editor.text(cx), "a = 1\nb = 2\n\nc = 3\nd = 4 \n"); }); + assert_eq!( + editor_content_with_blocks(&editor, cx), + indoc! {" + § + § ----- + a = 1 + b = 2 + § + § ----- + c = 3 + d = 4 + c"} + ); } #[gpui::test] @@ -947,14 +978,18 @@ mod tests { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), private_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), public_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(6, 0))], + [Point::new(0, 0)..Point::new(6, 0)], + 0, cx, ); multibuffer diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 5e451853a925d86ffcc1491a5c95af1f94e6ed05..2c13dc83c5a88c3504da6f8be48c1d75c8e43652 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -6,13 +6,12 @@ edition.workspace = true license = "GPL-3.0-or-later" [dependencies] -bincode.workspace = true cfg-if.workspace = true crash-handler.workspace = true futures.workspace = true 
log.workspace = true minidumper.workspace = true - +parking_lot.workspace = true paths.workspace = true release_channel.workspace = true smol.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index a1a43dbb88198b7afd4b89141f7578c0a5bc25ce..0c848d759cd444f3eb6e2a9838d3005254a25b19 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -2,12 +2,14 @@ use crash_handler::{CrashEventResult, CrashHandler}; use futures::future::BoxFuture; use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; +use parking_lot::Mutex; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; use std::mem; #[cfg(not(target_os = "windows"))] use smol::process::Command; +use system_specs::GpuSpecs; #[cfg(target_os = "macos")] use std::sync::atomic::AtomicU32; @@ -27,12 +29,14 @@ use std::{ }; // set once the crash handler has initialized and the client has connected to it -pub static CRASH_HANDLER: OnceLock> = OnceLock::new(); +static CRASH_HANDLER: OnceLock> = OnceLock::new(); // set when the first minidump request is made to avoid generating duplicate crash reports pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60); const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); +static PENDING_CRASH_SERVER_MESSAGES: Mutex> = Mutex::new(Vec::new()); + #[cfg(target_os = "macos")] static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); @@ -118,6 +122,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl spawn_crash_handler_windows(&exe, &socket_name); info!("spawning crash handler process"); + send_crash_server_message(CrashServerMessage::Init(crash_init)); let mut elapsed = Duration::ZERO; let retry_frequency = Duration::from_millis(100); @@ -134,10 +139,6 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl 
smol::Timer::after(retry_frequency).await; } let client = maybe_client.unwrap(); - client - .send_message(1, serde_json::to_vec(&crash_init).unwrap()) - .unwrap(); - let client = Arc::new(client); #[cfg(target_os = "linux")] @@ -146,6 +147,10 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl // Publishing the client to the OnceLock makes it visible to the signal // handler callback installed earlier. CRASH_HANDLER.set(client.clone()).ok(); + let messages: Vec<_> = mem::take(PENDING_CRASH_SERVER_MESSAGES.lock().as_mut()); + for message in messages.into_iter() { + send_crash_server_message(message); + } // mem::forget so that the drop is not called mem::forget(handler); info!("crash handler registered"); @@ -177,9 +182,10 @@ unsafe fn suspend_all_other_threads() { } pub struct CrashServer { - initialization_params: OnceLock, - panic_info: OnceLock, - active_gpu: OnceLock, + initialization_params: Mutex>, + panic_info: Mutex>, + active_gpu: Mutex>, + user_info: Mutex>, has_connection: Arc, } @@ -190,6 +196,7 @@ pub struct CrashInfo { pub minidump_error: Option, pub gpus: Vec, pub active_gpu: Option, + pub user_info: Option, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -207,15 +214,55 @@ pub struct CrashPanic { pub span: String, } +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct UserInfo { + pub metrics_id: Option, + pub is_staff: Option, +} + +fn send_crash_server_message(message: CrashServerMessage) { + let Some(crash_server) = CRASH_HANDLER.get() else { + PENDING_CRASH_SERVER_MESSAGES.lock().push(message); + return; + }; + let data = match serde_json::to_vec(&message) { + Ok(data) => data, + Err(err) => { + log::warn!("Failed to serialize crash server message: {:?}", err); + return; + } + }; + + if let Err(err) = crash_server.send_message(0, data) { + log::warn!("Failed to send data to crash server {:?}", err); + } +} + +pub fn set_gpu_info(specs: GpuSpecs) { + 
send_crash_server_message(CrashServerMessage::GPUInfo(specs)); +} + +pub fn set_user_info(info: UserInfo) { + send_crash_server_message(CrashServerMessage::UserInfo(info)); +} + +#[derive(Serialize, Deserialize, Debug)] +enum CrashServerMessage { + Init(InitCrashHandler), + Panic(CrashPanic), + GPUInfo(GpuSpecs), + UserInfo(UserInfo), +} + impl minidumper::ServerHandler for CrashServer { fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> { - let err_message = "Missing initialization data"; let dump_path = paths::logs_dir() .join( &self .initialization_params - .get() - .expect(err_message) + .lock() + .as_ref() + .expect("Missing initialization data") .session_id, ) .with_extension("dmp"); @@ -255,13 +302,14 @@ impl minidumper::ServerHandler for CrashServer { let crash_info = CrashInfo { init: self .initialization_params - .get() - .expect("not initialized") - .clone(), - panic: self.panic_info.get().cloned(), + .lock() + .clone() + .expect("not initialized"), + panic: self.panic_info.lock().clone(), minidump_error, - active_gpu: self.active_gpu.get().cloned(), + active_gpu: self.active_gpu.lock().clone(), gpus, + user_info: self.user_info.lock().clone(), }; let crash_data_path = paths::logs_dir() @@ -273,30 +321,21 @@ impl minidumper::ServerHandler for CrashServer { LoopAction::Exit } - fn on_message(&self, kind: u32, buffer: Vec) { - match kind { - 1 => { - let init_data = - serde_json::from_slice::(&buffer).expect("invalid init data"); - self.initialization_params - .set(init_data) - .expect("already initialized"); + fn on_message(&self, _: u32, buffer: Vec) { + let message: CrashServerMessage = + serde_json::from_slice(&buffer).expect("invalid init data"); + match message { + CrashServerMessage::Init(init_data) => { + self.initialization_params.lock().replace(init_data); } - 2 => { - let panic_data = - serde_json::from_slice::(&buffer).expect("invalid panic data"); - self.panic_info.set(panic_data).expect("already panicked"); + 
CrashServerMessage::Panic(crash_panic) => { + self.panic_info.lock().replace(crash_panic); } - 3 => { - let gpu_specs: system_specs::GpuSpecs = - bincode::deserialize(&buffer).expect("gpu specs"); - // we ignore the case where it was already set because this message is sent - // on each new window. in theory all zed windows should be using the same - // GPU so this is fine. - self.active_gpu.set(gpu_specs).ok(); + CrashServerMessage::GPUInfo(gpu_specs) => { + self.active_gpu.lock().replace(gpu_specs); } - _ => { - panic!("invalid message kind"); + CrashServerMessage::UserInfo(user_info) => { + self.user_info.lock().replace(user_info); } } } @@ -326,37 +365,33 @@ pub fn panic_hook(info: &PanicHookInfo) { // if it's still not there just write panic info and no minidump let retry_frequency = Duration::from_millis(100); for _ in 0..5 { - if let Some(client) = CRASH_HANDLER.get() { - let location = info - .location() - .map_or_else(|| "".to_owned(), |location| location.to_string()); - log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); - client - .send_message( - 2, - serde_json::to_vec(&CrashPanic { message, span }).unwrap(), - ) - .ok(); - log::error!("triggering a crash to generate a minidump..."); - - #[cfg(target_os = "macos")] - PANIC_THREAD_ID.store( - unsafe { mach2::mach_init::mach_thread_self() }, - Ordering::SeqCst, - ); - - cfg_if::cfg_if! 
{ - if #[cfg(target_os = "windows")] { - // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) - break; - } else { - std::process::abort(); - } - } + if CRASH_HANDLER.get().is_some() { + break; } thread::sleep(retry_frequency); } + let location = info + .location() + .map_or_else(|| "".to_owned(), |location| location.to_string()); + log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); + + send_crash_server_message(CrashServerMessage::Panic(CrashPanic { message, span })); + log::error!("triggering a crash to generate a minidump..."); + + #[cfg(target_os = "macos")] + PANIC_THREAD_ID.store( + unsafe { mach2::mach_init::mach_thread_self() }, + Ordering::SeqCst, + ); + + cfg_if::cfg_if! { + if #[cfg(target_os = "windows")] { + // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) + } else { + std::process::abort(); + } + } } #[cfg(target_os = "windows")] @@ -436,10 +471,11 @@ pub fn crash_server(socket: &Path) { server .run( Box::new(CrashServer { - initialization_params: OnceLock::new(), - panic_info: OnceLock::new(), + initialization_params: Mutex::default(), + panic_info: Mutex::default(), + user_info: Mutex::default(), has_connection, - active_gpu: OnceLock::new(), + active_gpu: Mutex::default(), }), &shutdown, Some(CRASH_HANDLER_PING_TIMEOUT), diff --git a/crates/csv_preview/Cargo.toml b/crates/csv_preview/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..7e9ce2c4d515cfce9586a0686475a8dfed0ddc95 --- /dev/null +++ b/crates/csv_preview/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "csv_preview" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[lib] +path = "src/csv_preview.rs" + +[dependencies] +anyhow.workspace = true +feature_flags.workspace = true +gpui.workspace = true 
+editor.workspace = true +ui.workspace = true +workspace.workspace = true +log.workspace = true +text.workspace = true + +[lints] +workspace = true diff --git a/crates/supermaven/LICENSE-GPL b/crates/csv_preview/LICENSE-GPL similarity index 100% rename from crates/supermaven/LICENSE-GPL rename to crates/csv_preview/LICENSE-GPL diff --git a/crates/csv_preview/src/csv_preview.rs b/crates/csv_preview/src/csv_preview.rs new file mode 100644 index 0000000000000000000000000000000000000000..f056f5a12225b000527b9087760e3d683bda1b5b --- /dev/null +++ b/crates/csv_preview/src/csv_preview.rs @@ -0,0 +1,302 @@ +use editor::{Editor, EditorEvent}; +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use gpui::{ + AppContext, Entity, EventEmitter, FocusHandle, Focusable, ListAlignment, Task, actions, +}; +use std::{ + collections::HashMap, + time::{Duration, Instant}, +}; + +use crate::table_data_engine::TableDataEngine; +use ui::{SharedString, TableColumnWidths, TableInteractionState, prelude::*}; +use workspace::{Item, SplitDirection, Workspace}; + +use crate::{parser::EditorState, settings::CsvPreviewSettings, types::TableLikeContent}; + +mod parser; +mod renderer; +mod settings; +mod table_data_engine; +mod types; + +actions!(csv, [OpenPreview, OpenPreviewToTheSide]); + +pub struct TabularDataPreviewFeatureFlag; + +impl FeatureFlag for TabularDataPreviewFeatureFlag { + const NAME: &'static str = "tabular-data-preview"; +} + +pub struct CsvPreviewView { + pub(crate) engine: TableDataEngine, + + pub(crate) focus_handle: FocusHandle, + active_editor_state: EditorState, + pub(crate) table_interaction_state: Entity, + pub(crate) column_widths: ColumnWidths, + pub(crate) parsing_task: Option>>, + pub(crate) settings: CsvPreviewSettings, + /// Performance metrics for debugging and monitoring CSV operations. 
+ pub(crate) performance_metrics: PerformanceMetrics, + pub(crate) list_state: gpui::ListState, + /// Time when the last parsing operation ended, used for smart debouncing + pub(crate) last_parse_end_time: Option, +} + +pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _, _| { + CsvPreviewView::register(workspace); + }) + .detach() +} + +impl CsvPreviewView { + pub fn register(workspace: &mut Workspace) { + workspace.register_action_renderer(|div, _, _, cx| { + div.when(cx.has_flag::(), |div| { + div.on_action(cx.listener(|workspace, _: &OpenPreview, window, cx| { + if let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .filter(|editor| Self::is_csv_file(editor, cx)) + { + let csv_preview = Self::new(&editor, cx); + workspace.active_pane().update(cx, |pane, cx| { + let existing = pane + .items_of_type::() + .find(|view| view.read(cx).active_editor_state.editor == editor); + if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) { + pane.activate_item(idx, true, true, window, cx); + } else { + pane.add_item(Box::new(csv_preview), true, true, None, window, cx); + } + }); + cx.notify(); + } + })) + .on_action(cx.listener( + |workspace, _: &OpenPreviewToTheSide, window, cx| { + if let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .filter(|editor| Self::is_csv_file(editor, cx)) + { + let csv_preview = Self::new(&editor, cx); + let pane = workspace + .find_pane_in_direction(SplitDirection::Right, cx) + .unwrap_or_else(|| { + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Right, + window, + cx, + ) + }); + pane.update(cx, |pane, cx| { + let existing = + pane.items_of_type::().find(|view| { + view.read(cx).active_editor_state.editor == editor + }); + if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) { + pane.activate_item(idx, true, true, window, cx); + } else { + pane.add_item( + Box::new(csv_preview), + false, + false, + 
None, + window, + cx, + ); + } + }); + cx.notify(); + } + }, + )) + }) + }); + } + + fn new(editor: &Entity, cx: &mut Context) -> Entity { + let contents = TableLikeContent::default(); + let table_interaction_state = cx.new(|cx| { + TableInteractionState::new(cx) + .with_custom_scrollbar(ui::Scrollbars::for_settings::()) + }); + + cx.new(|cx| { + let subscription = cx.subscribe( + editor, + |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| { + match event { + EditorEvent::Edited { .. } + | EditorEvent::DirtyChanged + | EditorEvent::ExcerptsEdited { .. } => { + this.parse_csv_from_active_editor(true, cx); + } + _ => {} + }; + }, + ); + + let mut view = CsvPreviewView { + focus_handle: cx.focus_handle(), + active_editor_state: EditorState { + editor: editor.clone(), + _subscription: subscription, + }, + table_interaction_state, + column_widths: ColumnWidths::new(cx, 1), + parsing_task: None, + performance_metrics: PerformanceMetrics::default(), + list_state: gpui::ListState::new(contents.rows.len(), ListAlignment::Top, px(1.)), + settings: CsvPreviewSettings::default(), + last_parse_end_time: None, + engine: TableDataEngine::default(), + }; + + view.parse_csv_from_active_editor(false, cx); + view + }) + } + + pub(crate) fn editor_state(&self) -> &EditorState { + &self.active_editor_state + } + pub(crate) fn apply_sort(&mut self) { + self.performance_metrics.record("Sort", || { + self.engine.apply_sort(); + }); + } + + /// Update ordered indices when ordering or content changes + pub(crate) fn apply_filter_sort(&mut self) { + self.performance_metrics.record("Filter&sort", || { + self.engine.calculate_d2d_mapping(); + }); + + // Update list state with filtered row count + let visible_rows = self.engine.d2d_mapping().visible_row_count(); + self.list_state = gpui::ListState::new(visible_rows, ListAlignment::Top, px(1.)); + } + + pub fn resolve_active_item_as_csv_editor( + workspace: &Workspace, + cx: &mut Context, + ) -> Option> { + let editor = workspace + 
.active_item(cx) + .and_then(|item| item.act_as::(cx))?; + Self::is_csv_file(&editor, cx).then_some(editor) + } + + fn is_csv_file(editor: &Entity, cx: &App) -> bool { + editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .and_then(|buffer| { + buffer + .read(cx) + .file() + .and_then(|file| file.path().extension()) + .map(|ext| ext.eq_ignore_ascii_case("csv")) + }) + .unwrap_or(false) + } +} + +impl Focusable for CsvPreviewView { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter<()> for CsvPreviewView {} + +impl Item for CsvPreviewView { + type Event = (); + + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(Icon::new(IconName::FileDoc)) + } + + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + self.editor_state() + .editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .and_then(|b| { + let file = b.read(cx).file()?; + let local_file = file.as_local()?; + local_file + .abs_path(cx) + .file_name() + .map(|name| format!("Preview {}", name.to_string_lossy()).into()) + }) + .unwrap_or_else(|| SharedString::from("CSV Preview")) + } +} + +#[derive(Debug, Default)] +pub struct PerformanceMetrics { + /// Map of timing metrics with their duration and measurement time. + pub timings: HashMap<&'static str, (Duration, Instant)>, + /// List of display indices that were rendered in the current frame. 
+ pub rendered_indices: Vec, +} +impl PerformanceMetrics { + pub fn record(&mut self, name: &'static str, mut f: F) -> R + where + F: FnMut() -> R, + { + let start_time = Instant::now(); + let ret = f(); + let duration = start_time.elapsed(); + self.timings.insert(name, (duration, Instant::now())); + ret + } + + /// Displays all metrics sorted A-Z in format: `{name}: {took}ms {ago}s ago` + pub fn display(&self) -> String { + let mut metrics = self.timings.iter().collect::>(); + metrics.sort_by_key(|&(name, _)| *name); + metrics + .iter() + .map(|(name, (duration, time))| { + let took = duration.as_secs_f32() * 1000.; + let ago = time.elapsed().as_secs(); + format!("{name}: {took:.2}ms {ago}s ago") + }) + .collect::>() + .join("\n") + } + + /// Get timing for a specific metric + pub fn get_timing(&self, name: &str) -> Option { + self.timings.get(name).map(|(duration, _)| *duration) + } +} + +/// Holds state of column widths for a table component in CSV preview. +pub(crate) struct ColumnWidths { + pub widths: Entity, +} + +impl ColumnWidths { + pub(crate) fn new(cx: &mut Context, cols: usize) -> Self { + Self { + widths: cx.new(|cx| TableColumnWidths::new(cols, cx)), + } + } + /// Replace the current `TableColumnWidths` entity with a new one for the given column count. 
+ pub(crate) fn replace(&self, cx: &mut Context, cols: usize) { + self.widths + .update(cx, |entity, cx| *entity = TableColumnWidths::new(cols, cx)); + } +} diff --git a/crates/csv_preview/src/parser.rs b/crates/csv_preview/src/parser.rs new file mode 100644 index 0000000000000000000000000000000000000000..b087404e0ebbd13cdaf20cab692f5470ea6ce292 --- /dev/null +++ b/crates/csv_preview/src/parser.rs @@ -0,0 +1,513 @@ +use crate::{ + CsvPreviewView, + types::TableLikeContent, + types::{LineNumber, TableCell}, +}; +use editor::Editor; +use gpui::{AppContext, Context, Entity, Subscription, Task}; +use std::time::{Duration, Instant}; +use text::BufferSnapshot; +use ui::{SharedString, table_row::TableRow}; + +pub(crate) const REPARSE_DEBOUNCE: Duration = Duration::from_millis(200); + +pub(crate) struct EditorState { + pub editor: Entity, + pub _subscription: Subscription, +} + +impl CsvPreviewView { + pub(crate) fn parse_csv_from_active_editor( + &mut self, + wait_for_debounce: bool, + cx: &mut Context, + ) { + let editor = self.active_editor_state.editor.clone(); + self.parsing_task = Some(self.parse_csv_in_background(wait_for_debounce, editor, cx)); + } + + fn parse_csv_in_background( + &mut self, + wait_for_debounce: bool, + editor: Entity, + cx: &mut Context, + ) -> Task> { + cx.spawn(async move |view, cx| { + if wait_for_debounce { + // Smart debouncing: check if cooldown period has already passed + let now = Instant::now(); + let should_wait = view.update(cx, |view, _| { + if let Some(last_end) = view.last_parse_end_time { + let cooldown_until = last_end + REPARSE_DEBOUNCE; + if now < cooldown_until { + Some(cooldown_until - now) + } else { + None // Cooldown already passed, parse immediately + } + } else { + None // First parse, no debounce + } + })?; + + if let Some(wait_duration) = should_wait { + cx.background_executor().timer(wait_duration).await; + } + } + + let buffer_snapshot = view.update(cx, |_, cx| { + editor + .read(cx) + .buffer() + .read(cx) + 
.as_singleton() + .map(|b| b.read(cx).text_snapshot()) + })?; + + let Some(buffer_snapshot) = buffer_snapshot else { + return Ok(()); + }; + + let instant = Instant::now(); + let parsed_csv = cx + .background_spawn(async move { from_buffer(&buffer_snapshot) }) + .await; + let parse_duration = instant.elapsed(); + let parse_end_time: Instant = Instant::now(); + log::debug!("Parsed CSV in {}ms", parse_duration.as_millis()); + view.update(cx, move |view, cx| { + view.performance_metrics + .timings + .insert("Parsing", (parse_duration, Instant::now())); + + log::debug!("Parsed {} rows", parsed_csv.rows.len()); + // Update table width so it can be rendered properly + let cols = parsed_csv.headers.cols(); + view.column_widths.replace(cx, cols + 1); // Add 1 for the line number column + + view.engine.contents = parsed_csv; + view.last_parse_end_time = Some(parse_end_time); + + view.apply_filter_sort(); + cx.notify(); + }) + }) + } +} + +pub fn from_buffer(buffer_snapshot: &BufferSnapshot) -> TableLikeContent { + let text = buffer_snapshot.text(); + + if text.trim().is_empty() { + return TableLikeContent::default(); + } + + let (parsed_cells_with_positions, line_numbers) = parse_csv_with_positions(&text); + if parsed_cells_with_positions.is_empty() { + return TableLikeContent::default(); + } + let raw_headers = parsed_cells_with_positions[0].clone(); + + // Calculating the longest row, as CSV might have less headers than max row width + let Some(max_number_of_cols) = parsed_cells_with_positions.iter().map(|r| r.len()).max() else { + return TableLikeContent::default(); + }; + + // Convert to TableCell objects with buffer positions + let headers = create_table_row(&buffer_snapshot, max_number_of_cols, raw_headers); + + let rows = parsed_cells_with_positions + .into_iter() + .skip(1) + .map(|row| create_table_row(&buffer_snapshot, max_number_of_cols, row)) + .collect(); + + let row_line_numbers = line_numbers.into_iter().skip(1).collect(); + + TableLikeContent { + headers, + 
rows, + line_numbers: row_line_numbers, + number_of_cols: max_number_of_cols, + } +} + +/// Parse CSV and track byte positions for each cell +fn parse_csv_with_positions( + text: &str, +) -> ( + Vec)>>, + Vec, +) { + let mut rows = Vec::new(); + let mut line_numbers = Vec::new(); + let mut current_row: Vec<(SharedString, std::ops::Range)> = Vec::new(); + let mut current_field = String::new(); + let mut field_start_offset = 0; + let mut current_offset = 0; + let mut in_quotes = false; + let mut current_line = 1; // 1-based line numbering + let mut row_start_line = 1; + let mut chars = text.chars().peekable(); + + while let Some(ch) = chars.next() { + let char_byte_len = ch.len_utf8(); + + match ch { + '"' => { + if in_quotes { + if chars.peek() == Some(&'"') { + // Escaped quote + chars.next(); + current_field.push('"'); + current_offset += 1; // Skip the second quote + } else { + // End of quoted field + in_quotes = false; + } + } else { + // Start of quoted field + in_quotes = true; + if current_field.is_empty() { + // Include the opening quote in the range + field_start_offset = current_offset; + } + } + } + ',' if !in_quotes => { + // Field separator + let field_end_offset = current_offset; + if current_field.is_empty() && !in_quotes { + field_start_offset = current_offset; + } + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + field_start_offset = current_offset + char_byte_len; + } + '\n' => { + current_line += 1; + if !in_quotes { + // Row separator (only when not inside quotes) + let field_end_offset = current_offset; + if current_field.is_empty() && current_row.is_empty() { + field_start_offset = 0; + } + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + + // Only add non-empty rows + if !current_row.is_empty() + && !current_row.iter().all(|(field, _)| field.trim().is_empty()) + { + rows.push(current_row); + // 
Add line number info for this row + let line_info = if row_start_line == current_line - 1 { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line - 1) + }; + line_numbers.push(line_info); + } + current_row = Vec::new(); + row_start_line = current_line; + field_start_offset = current_offset + char_byte_len; + } else { + // Newline inside quotes - preserve it + current_field.push(ch); + } + } + '\r' => { + if chars.peek() == Some(&'\n') { + // Handle Windows line endings (\r\n): account for \r byte, let \n be handled next + current_offset += char_byte_len; + continue; + } else { + // Standalone \r + current_line += 1; + if !in_quotes { + // Row separator (only when not inside quotes) + let field_end_offset = current_offset; + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + + // Only add non-empty rows + if !current_row.is_empty() + && !current_row.iter().all(|(field, _)| field.trim().is_empty()) + { + rows.push(current_row); + // Add line number info for this row + let line_info = if row_start_line == current_line - 1 { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line - 1) + }; + line_numbers.push(line_info); + } + current_row = Vec::new(); + row_start_line = current_line; + field_start_offset = current_offset + char_byte_len; + } else { + // \r inside quotes - preserve it + current_field.push(ch); + } + } + } + _ => { + if current_field.is_empty() && !in_quotes { + field_start_offset = current_offset; + } + current_field.push(ch); + } + } + + current_offset += char_byte_len; + } + + // Add the last field and row if not empty + if !current_field.is_empty() || !current_row.is_empty() { + let field_end_offset = current_offset; + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + } + if !current_row.is_empty() && !current_row.iter().all(|(field, _)| 
field.trim().is_empty()) { + rows.push(current_row); + // Add line number info for the last row + let line_info = if row_start_line == current_line { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line) + }; + line_numbers.push(line_info); + } + + (rows, line_numbers) +} + +fn create_table_row( + buffer_snapshot: &BufferSnapshot, + max_number_of_cols: usize, + row: Vec<(SharedString, std::ops::Range)>, +) -> TableRow { + let mut raw_row = row + .into_iter() + .map(|(content, range)| { + TableCell::from_buffer_position(content, range.start, range.end, &buffer_snapshot) + }) + .collect::>(); + + let append_elements = max_number_of_cols - raw_row.len(); + if append_elements > 0 { + for _ in 0..append_elements { + raw_row.push(TableCell::Virtual); + } + } + + TableRow::from_vec(raw_row, max_number_of_cols) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_csv_parsing_basic() { + let csv_data = "Name,Age,City\nJohn,30,New York\nJane,25,Los Angeles"; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 3); + assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name"); + assert_eq!(parsed.headers[1].display_value().unwrap().as_ref(), "Age"); + assert_eq!(parsed.headers[2].display_value().unwrap().as_ref(), "City"); + + assert_eq!(parsed.rows.len(), 2); + assert_eq!(parsed.rows[0][0].display_value().unwrap().as_ref(), "John"); + assert_eq!(parsed.rows[0][1].display_value().unwrap().as_ref(), "30"); + assert_eq!( + parsed.rows[0][2].display_value().unwrap().as_ref(), + "New York" + ); + } + + #[test] + fn test_csv_parsing_with_quotes() { + let csv_data = r#"Name,Description +"John Doe","A person with ""special"" characters" +Jane,"Simple name""#; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 2); + assert_eq!(parsed.rows.len(), 2); + assert_eq!( + 
parsed.rows[0][1].display_value().unwrap().as_ref(), + r#"A person with "special" characters"# + ); + } + + #[test] + fn test_csv_parsing_with_newlines_in_quotes() { + let csv_data = "Name,Description,Status\n\"John\nDoe\",\"A person with\nmultiple lines\",Active\n\"Jane Smith\",\"Simple\",\"Also\nActive\""; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 3); + assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name"); + assert_eq!( + parsed.headers[1].display_value().unwrap().as_ref(), + "Description" + ); + assert_eq!( + parsed.headers[2].display_value().unwrap().as_ref(), + "Status" + ); + + assert_eq!(parsed.rows.len(), 2); + assert_eq!( + parsed.rows[0][0].display_value().unwrap().as_ref(), + "John\nDoe" + ); + assert_eq!( + parsed.rows[0][1].display_value().unwrap().as_ref(), + "A person with\nmultiple lines" + ); + assert_eq!( + parsed.rows[0][2].display_value().unwrap().as_ref(), + "Active" + ); + + assert_eq!( + parsed.rows[1][0].display_value().unwrap().as_ref(), + "Jane Smith" + ); + assert_eq!( + parsed.rows[1][1].display_value().unwrap().as_ref(), + "Simple" + ); + assert_eq!( + parsed.rows[1][2].display_value().unwrap().as_ref(), + "Also\nActive" + ); + + // Check line numbers + assert_eq!(parsed.line_numbers.len(), 2); + match &parsed.line_numbers[0] { + LineNumber::LineRange(start, end) => { + assert_eq!(start, &2); + assert_eq!(end, &4); + } + _ => panic!("Expected LineRange for multiline row"), + } + match &parsed.line_numbers[1] { + LineNumber::LineRange(start, end) => { + assert_eq!(start, &5); + assert_eq!(end, &6); + } + _ => panic!("Expected LineRange for second multiline row"), + } + } + + #[test] + fn test_empty_csv() { + let parsed = TableLikeContent::from_str("".to_string()); + assert_eq!(parsed.headers.cols(), 0); + assert!(parsed.rows.is_empty()); + } + + #[test] + fn test_csv_parsing_quote_offset_handling() { + let csv_data = r#"first,"se,cond",third"#; + let 
(parsed_cells, _) = parse_csv_with_positions(csv_data); + + assert_eq!(parsed_cells.len(), 1); // One row + assert_eq!(parsed_cells[0].len(), 3); // Three cells + + // first: 0..5 (no quotes) + let (content1, range1) = &parsed_cells[0][0]; + assert_eq!(content1.as_ref(), "first"); + assert_eq!(*range1, 0..5); + + // "se,cond": 6..15 (includes quotes in range, content without quotes) + let (content2, range2) = &parsed_cells[0][1]; + assert_eq!(content2.as_ref(), "se,cond"); + assert_eq!(*range2, 6..15); + + // third: 16..21 (no quotes) + let (content3, range3) = &parsed_cells[0][2]; + assert_eq!(content3.as_ref(), "third"); + assert_eq!(*range3, 16..21); + } + + #[test] + fn test_csv_parsing_complex_quotes() { + let csv_data = r#"id,"name with spaces","description, with commas",status +1,"John Doe","A person with ""quotes"" and, commas",active +2,"Jane Smith","Simple description",inactive"#; + let (parsed_cells, _) = parse_csv_with_positions(csv_data); + + assert_eq!(parsed_cells.len(), 3); // header + 2 rows + + // Check header row + let header_row = &parsed_cells[0]; + assert_eq!(header_row.len(), 4); + + // id: 0..2 + assert_eq!(header_row[0].0.as_ref(), "id"); + assert_eq!(header_row[0].1, 0..2); + + // "name with spaces": 3..21 (includes quotes) + assert_eq!(header_row[1].0.as_ref(), "name with spaces"); + assert_eq!(header_row[1].1, 3..21); + + // "description, with commas": 22..48 (includes quotes) + assert_eq!(header_row[2].0.as_ref(), "description, with commas"); + assert_eq!(header_row[2].1, 22..48); + + // status: 49..55 + assert_eq!(header_row[3].0.as_ref(), "status"); + assert_eq!(header_row[3].1, 49..55); + + // Check first data row + let first_row = &parsed_cells[1]; + assert_eq!(first_row.len(), 4); + + // 1: 56..57 + assert_eq!(first_row[0].0.as_ref(), "1"); + assert_eq!(first_row[0].1, 56..57); + + // "John Doe": 58..68 (includes quotes) + assert_eq!(first_row[1].0.as_ref(), "John Doe"); + assert_eq!(first_row[1].1, 58..68); + + // Content should 
be stripped of quotes but include escaped quotes + assert_eq!( + first_row[2].0.as_ref(), + r#"A person with "quotes" and, commas"# + ); + // The range should include the outer quotes: 69..107 + assert_eq!(first_row[2].1, 69..107); + + // active: 108..114 + assert_eq!(first_row[3].0.as_ref(), "active"); + assert_eq!(first_row[3].1, 108..114); + } +} + +impl TableLikeContent { + #[cfg(test)] + pub fn from_str(text: String) -> Self { + use text::{Buffer, BufferId, ReplicaId}; + + let buffer_id = BufferId::new(1).unwrap(); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, text); + let snapshot = buffer.snapshot(); + from_buffer(snapshot) + } +} diff --git a/crates/csv_preview/src/renderer.rs b/crates/csv_preview/src/renderer.rs new file mode 100644 index 0000000000000000000000000000000000000000..42ae05936c7ebd3fb9c619793376998b6d33e2c1 --- /dev/null +++ b/crates/csv_preview/src/renderer.rs @@ -0,0 +1,5 @@ +mod preview_view; +mod render_table; +mod row_identifiers; +mod table_cell; +mod table_header; diff --git a/crates/csv_preview/src/renderer/preview_view.rs b/crates/csv_preview/src/renderer/preview_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..55e62d03806b578f59c2542cf997f90ec22a1f8f --- /dev/null +++ b/crates/csv_preview/src/renderer/preview_view.rs @@ -0,0 +1,50 @@ +use std::time::Instant; + +use ui::{div, prelude::*}; + +use crate::{CsvPreviewView, settings::FontType}; + +impl Render for CsvPreviewView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let theme = cx.theme(); + + self.performance_metrics.rendered_indices.clear(); + let render_prep_start = Instant::now(); + let table_with_settings = v_flex() + .size_full() + .p_4() + .bg(theme.colors().editor_background) + .track_focus(&self.focus_handle) + .child({ + if self.engine.contents.number_of_cols == 0 { + div() + .flex() + .items_center() + .justify_center() + .h_32() + .text_ui(cx) + .map(|div| match self.settings.font_type { + 
FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .text_color(cx.theme().colors().text_muted) + .child("No CSV content to display") + .into_any_element() + } else { + self.create_table(&self.column_widths.widths, cx) + } + }); + + let render_prep_duration = render_prep_start.elapsed(); + self.performance_metrics.timings.insert( + "render_prep", + (render_prep_duration, std::time::Instant::now()), + ); + + div() + .relative() + .w_full() + .h_full() + .child(table_with_settings) + } +} diff --git a/crates/csv_preview/src/renderer/render_table.rs b/crates/csv_preview/src/renderer/render_table.rs new file mode 100644 index 0000000000000000000000000000000000000000..0cc3bc3c46fb24570b3c99c9121dff3860c6b820 --- /dev/null +++ b/crates/csv_preview/src/renderer/render_table.rs @@ -0,0 +1,193 @@ +use crate::types::TableCell; +use gpui::{AnyElement, Entity}; +use std::ops::Range; +use ui::Table; +use ui::TableColumnWidths; +use ui::TableResizeBehavior; +use ui::UncheckedTableRow; +use ui::{DefiniteLength, div, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::RowRenderMechanism, + types::{AnyColumn, DisplayCellId, DisplayRow}, +}; + +impl CsvPreviewView { + /// Creates a new table. + /// Column number is derived from the `TableColumnWidths` entity. + pub(crate) fn create_table( + &self, + current_widths: &Entity, + cx: &mut Context, + ) -> AnyElement { + let cols = current_widths.read(cx).cols(); + let remaining_col_number = cols - 1; + let fraction = if remaining_col_number > 0 { + 1. / remaining_col_number as f32 + } else { + 1. // only column with line numbers is present. 
Put 100%, but it will be overwritten anyways :D + }; + let mut widths = vec![DefiniteLength::Fraction(fraction); cols]; + let line_number_width = self.calculate_row_identifier_column_width(); + widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into())); + + let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols]; + resize_behaviors[0] = TableResizeBehavior::None; + + self.create_table_inner( + self.engine.contents.rows.len(), + widths, + resize_behaviors, + current_widths, + cx, + ) + } + + fn create_table_inner( + &self, + row_count: usize, + widths: UncheckedTableRow, + resize_behaviors: UncheckedTableRow, + current_widths: &Entity, + cx: &mut Context, + ) -> AnyElement { + let cols = widths.len(); + // Create headers array with interactive elements + let mut headers = Vec::with_capacity(cols); + + headers.push(self.create_row_identifier_header(cx)); + + // Add the actual CSV headers with sort buttons + for i in 0..(cols - 1) { + let header_text = self + .engine + .contents + .headers + .get(AnyColumn(i)) + .and_then(|h| h.display_value().cloned()) + .unwrap_or_else(|| format!("Col {}", i + 1).into()); + + headers.push(self.create_header_element_with_sort_button( + header_text, + cx, + AnyColumn::from(i), + )); + } + + Table::new(cols) + .interactable(&self.table_interaction_state) + .striped() + .column_widths(widths) + .resizable_columns(resize_behaviors, current_widths, cx) + .header(headers) + .disable_base_style() + .map(|table| { + let row_identifier_text_color = cx.theme().colors().editor_line_number; + match self.settings.rendering_with { + RowRenderMechanism::VariableList => { + table.variable_row_height_list(row_count, self.list_state.clone(), { + cx.processor(move |this, display_row: usize, _window, cx| { + this.performance_metrics.rendered_indices.push(display_row); + + let display_row = DisplayRow(display_row); + Self::render_single_table_row( + this, + cols, + display_row, + row_identifier_text_color, + cx, 
+ ) + .unwrap_or_else(|| panic!("Expected to render a table row")) + }) + }) + } + RowRenderMechanism::UniformList => { + table.uniform_list("csv-table", row_count, { + cx.processor(move |this, range: Range, _window, cx| { + // Record all display indices in the range for performance metrics + this.performance_metrics + .rendered_indices + .extend(range.clone()); + + range + .filter_map(|display_index| { + Self::render_single_table_row( + this, + cols, + DisplayRow(display_index), + row_identifier_text_color, + cx, + ) + }) + .collect() + }) + }) + } + } + }) + .into_any_element() + } + + /// Render a single table row + /// + /// Used both by UniformList and VariableRowHeightList + fn render_single_table_row( + this: &CsvPreviewView, + cols: usize, + display_row: DisplayRow, + row_identifier_text_color: gpui::Hsla, + cx: &Context, + ) -> Option> { + // Get the actual row index from our sorted indices + let data_row = this.engine.d2d_mapping().get_data_row(display_row)?; + let row = this.engine.contents.get_row(data_row)?; + + let mut elements = Vec::with_capacity(cols); + elements.push(this.create_row_identifier_cell(display_row, data_row, cx)?); + + // Remaining columns: actual CSV data + for col in (0..this.engine.contents.number_of_cols).map(AnyColumn) { + let table_cell = row.expect_get(col); + + // TODO: Introduce `` cell type + let cell_content = table_cell.display_value().cloned().unwrap_or_default(); + + let display_cell_id = DisplayCellId::new(display_row, col); + + let cell = div().size_full().whitespace_nowrap().text_ellipsis().child( + CsvPreviewView::create_selectable_cell( + display_cell_id, + cell_content, + this.settings.vertical_alignment, + this.settings.font_type, + cx, + ), + ); + + elements.push( + div() + .size_full() + .when(this.settings.show_debug_info, |parent| { + parent.child(div().text_color(row_identifier_text_color).child( + match table_cell { + TableCell::Real { position: pos, .. 
} => { + let slv = pos.start.timestamp().value; + let so = pos.start.offset; + let elv = pos.end.timestamp().value; + let eo = pos.end.offset; + format!("Pos {so}(L{slv})-{eo}(L{elv})") + } + TableCell::Virtual => "Virtual cell".into(), + }, + )) + }) + .text_ui(cx) + .child(cell) + .into_any_element(), + ); + } + + Some(elements) + } +} diff --git a/crates/csv_preview/src/renderer/row_identifiers.rs b/crates/csv_preview/src/renderer/row_identifiers.rs new file mode 100644 index 0000000000000000000000000000000000000000..a122aa9bf3d803b9deb9c6211e117ba4aa593d93 --- /dev/null +++ b/crates/csv_preview/src/renderer/row_identifiers.rs @@ -0,0 +1,189 @@ +use ui::{ + ActiveTheme as _, AnyElement, Button, ButtonCommon as _, ButtonSize, ButtonStyle, + Clickable as _, Context, ElementId, FluentBuilder as _, IntoElement as _, ParentElement as _, + SharedString, Styled as _, StyledTypography as _, Tooltip, div, +}; + +use crate::{ + CsvPreviewView, + settings::{FontType, RowIdentifiers}, + types::{DataRow, DisplayRow, LineNumber}, +}; + +pub enum RowIdentDisplayMode { + /// E.g + /// ```text + /// 1 + /// ... + /// 5 + /// ``` + Vertical, + /// E.g. + /// ```text + /// 1-5 + /// ``` + Horizontal, +} + +impl LineNumber { + pub fn display_string(&self, mode: RowIdentDisplayMode) -> String { + match *self { + LineNumber::Line(line) => line.to_string(), + LineNumber::LineRange(start, end) => match mode { + RowIdentDisplayMode::Vertical => { + if start + 1 == end { + format!("{start}\n{end}") + } else { + format!("{start}\n...\n{end}") + } + } + RowIdentDisplayMode::Horizontal => { + format!("{start}-{end}") + } + }, + } + } +} + +impl CsvPreviewView { + /// Calculate the optimal width for the row identifier column (line numbers or row numbers). + /// + /// This ensures the column is wide enough to display the largest identifier comfortably, + /// but not wastefully wide for small files. 
+ pub(crate) fn calculate_row_identifier_column_width(&self) -> f32 { + match self.settings.numbering_type { + RowIdentifiers::SrcLines => self.calculate_line_number_width(), + RowIdentifiers::RowNum => self.calculate_row_number_width(), + } + } + + /// Calculate width needed for line numbers (can be multi-line) + fn calculate_line_number_width(&self) -> f32 { + // Find the maximum line number that could be displayed + let max_line_number = self + .engine + .contents + .line_numbers + .iter() + .map(|ln| match ln { + LineNumber::Line(n) => *n, + LineNumber::LineRange(_, end) => *end, + }) + .max() + .unwrap_or_default(); + + let digit_count = if max_line_number == 0 { + 1 + } else { + (max_line_number as f32).log10().floor() as usize + 1 + }; + + // if !self.settings.multiline_cells_enabled { + // // Uses horizontal line numbers layout like `123-456`. Needs twice the size + // digit_count *= 2; + // } + + let char_width_px = 9.0; // TODO: get real width of the characters + let base_width = (digit_count as f32) * char_width_px; + let padding = 20.0; + let min_width = 60.0; + (base_width + padding).max(min_width) + } + + /// Calculate width needed for sequential row numbers + fn calculate_row_number_width(&self) -> f32 { + let max_row_number = self.engine.contents.rows.len(); + + let digit_count = if max_row_number == 0 { + 1 + } else { + (max_row_number as f32).log10().floor() as usize + 1 + }; + + let char_width_px = 9.0; // TODO: get real width of the characters + let base_width = (digit_count as f32) * char_width_px; + let padding = 20.0; + let min_width = 60.0; + (base_width + padding).max(min_width) + } + + pub(crate) fn create_row_identifier_header( + &self, + cx: &mut Context<'_, CsvPreviewView>, + ) -> AnyElement { + // First column: row identifier (clickable to toggle between Lines and Rows) + let row_identifier_text = match self.settings.numbering_type { + RowIdentifiers::SrcLines => "Lines", + RowIdentifiers::RowNum => "Rows", + }; + + let view = 
cx.entity(); + let value = div() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child( + Button::new( + ElementId::Name("row-identifier-toggle".into()), + row_identifier_text, + ) + .style(ButtonStyle::Subtle) + .size(ButtonSize::Compact) + .tooltip(Tooltip::text( + "Toggle between: file line numbers or sequential row numbers", + )) + .on_click(move |_event, _window, cx| { + view.update(cx, |this, cx| { + this.settings.numbering_type = match this.settings.numbering_type { + RowIdentifiers::SrcLines => RowIdentifiers::RowNum, + RowIdentifiers::RowNum => RowIdentifiers::SrcLines, + }; + cx.notify(); + }); + }), + ) + .into_any_element(); + value + } + + pub(crate) fn create_row_identifier_cell( + &self, + display_row: DisplayRow, + data_row: DataRow, + cx: &Context<'_, CsvPreviewView>, + ) -> Option { + let row_identifier: SharedString = match self.settings.numbering_type { + RowIdentifiers::SrcLines => self + .engine + .contents + .line_numbers + .get(*data_row)? 
+ .display_string(if self.settings.multiline_cells_enabled { + RowIdentDisplayMode::Vertical + } else { + RowIdentDisplayMode::Horizontal + }) + .into(), + RowIdentifiers::RowNum => (*display_row + 1).to_string().into(), + }; + + let value = div() + .flex() + .px_1() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .h_full() + .text_ui(cx) + // Row identifiers are always centered + .items_center() + .justify_end() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child(row_identifier) + .into_any_element(); + Some(value) + } +} diff --git a/crates/csv_preview/src/renderer/table_cell.rs b/crates/csv_preview/src/renderer/table_cell.rs new file mode 100644 index 0000000000000000000000000000000000000000..32900ab77708936e218e9af10a4de5fba796e6a7 --- /dev/null +++ b/crates/csv_preview/src/renderer/table_cell.rs @@ -0,0 +1,72 @@ +//! Table Cell Rendering + +use gpui::{AnyElement, ElementId}; +use ui::{SharedString, Tooltip, div, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::{FontType, VerticalAlignment}, + types::DisplayCellId, +}; + +impl CsvPreviewView { + /// Create selectable table cell with mouse event handlers. + pub fn create_selectable_cell( + display_cell_id: DisplayCellId, + cell_content: SharedString, + vertical_alignment: VerticalAlignment, + font_type: FontType, + cx: &Context, + ) -> AnyElement { + create_table_cell( + display_cell_id, + cell_content, + vertical_alignment, + font_type, + cx, + ) + // Mouse events handlers will be here + .into_any_element() + } +} + +/// Create styled table cell div element. +fn create_table_cell( + display_cell_id: DisplayCellId, + cell_content: SharedString, + vertical_alignment: VerticalAlignment, + font_type: FontType, + cx: &Context<'_, CsvPreviewView>, +) -> gpui::Stateful
{ + div() + .id(ElementId::NamedInteger( + format!( + "csv-display-cell-{}-{}", + *display_cell_id.row, *display_cell_id.col + ) + .into(), + 0, + )) + .cursor_pointer() + .flex() + .h_full() + .px_1() + .bg(cx.theme().colors().editor_background) + .border_b_1() + .border_r_1() + .border_color(cx.theme().colors().border_variant) + .map(|div| match vertical_alignment { + VerticalAlignment::Top => div.items_start(), + VerticalAlignment::Center => div.items_center(), + }) + .map(|div| match vertical_alignment { + VerticalAlignment::Top => div.content_start(), + VerticalAlignment::Center => div.content_center(), + }) + .map(|div| match font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .tooltip(Tooltip::text(cell_content.clone())) + .child(div().child(cell_content)) +} diff --git a/crates/csv_preview/src/renderer/table_header.rs b/crates/csv_preview/src/renderer/table_header.rs new file mode 100644 index 0000000000000000000000000000000000000000..52a16be9fc81ef1c3f001513b652a33c3b06dc82 --- /dev/null +++ b/crates/csv_preview/src/renderer/table_header.rs @@ -0,0 +1,94 @@ +use gpui::ElementId; +use ui::{Tooltip, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::FontType, + table_data_engine::sorting_by_column::{AppliedSorting, SortDirection}, + types::AnyColumn, +}; + +impl CsvPreviewView { + /// Create header for data, which is orderable with text on the left and sort button on the right + pub(crate) fn create_header_element_with_sort_button( + &self, + header_text: SharedString, + cx: &mut Context<'_, CsvPreviewView>, + col_idx: AnyColumn, + ) -> AnyElement { + // CSV data columns: text + filter/sort buttons + h_flex() + .justify_between() + .items_center() + .w_full() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child(div().child(header_text)) + .child(h_flex().gap_1().child(self.create_sort_button(cx, col_idx))) + 
.into_any_element() + } + + fn create_sort_button( + &self, + cx: &mut Context<'_, CsvPreviewView>, + col_idx: AnyColumn, + ) -> Button { + let sort_btn = Button::new( + ElementId::NamedInteger("sort-button".into(), col_idx.get() as u64), + match self.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => match ordering.direction { + SortDirection::Asc => "↓", + SortDirection::Desc => "↑", + }, + _ => "↕", // Unsorted/available for sorting + }, + ) + .size(ButtonSize::Compact) + .style( + if self + .engine + .applied_sorting + .is_some_and(|o| o.col_idx == col_idx) + { + ButtonStyle::Filled + } else { + ButtonStyle::Subtle + }, + ) + .tooltip(Tooltip::text(match self.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => match ordering.direction { + SortDirection::Asc => "Sorted A-Z. Click to sort Z-A", + SortDirection::Desc => "Sorted Z-A. Click to disable sorting", + }, + _ => "Not sorted. Click to sort A-Z", + })) + .on_click(cx.listener(move |this, _event, _window, cx| { + let new_sorting = match this.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => { + // Same column clicked - cycle through states + match ordering.direction { + SortDirection::Asc => Some(AppliedSorting { + col_idx, + direction: SortDirection::Desc, + }), + SortDirection::Desc => None, // Clear sorting + } + } + _ => { + // Different column or no sorting - start with ascending + Some(AppliedSorting { + col_idx, + direction: SortDirection::Asc, + }) + } + }; + + this.engine.applied_sorting = new_sorting; + this.apply_sort(); + cx.notify(); + })); + sort_btn + } +} diff --git a/crates/csv_preview/src/settings.rs b/crates/csv_preview/src/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..e627b3cc994a84f54268a05ba17534789f631fe0 --- /dev/null +++ b/crates/csv_preview/src/settings.rs @@ -0,0 +1,46 @@ +#[derive(Default, Clone, Copy)] +pub enum RowRenderMechanism { + /// Default behaviour + #[default] + 
VariableList, + /// More performance oriented, but all rows are same height + #[allow(dead_code)] // Will be used when settings ui is added + UniformList, +} + +#[derive(Default, Clone, Copy)] +pub enum VerticalAlignment { + /// Align text to the top of cells + #[default] + Top, + /// Center text vertically in cells + Center, +} + +#[derive(Default, Clone, Copy)] +pub enum FontType { + /// Use the default UI font + #[default] + Ui, + /// Use monospace font (same as buffer/editor font) + Monospace, +} + +#[derive(Default, Clone, Copy)] +pub enum RowIdentifiers { + /// Show original line numbers from CSV file + #[default] + SrcLines, + /// Show sequential row numbers starting from 1 + RowNum, +} + +#[derive(Clone, Default)] +pub(crate) struct CsvPreviewSettings { + pub(crate) rendering_with: RowRenderMechanism, + pub(crate) vertical_alignment: VerticalAlignment, + pub(crate) font_type: FontType, + pub(crate) numbering_type: RowIdentifiers, + pub(crate) show_debug_info: bool, + pub(crate) multiline_cells_enabled: bool, +} diff --git a/crates/csv_preview/src/table_data_engine.rs b/crates/csv_preview/src/table_data_engine.rs new file mode 100644 index 0000000000000000000000000000000000000000..382b41a28507213dcc5993adb49a1fddc5e7b64c --- /dev/null +++ b/crates/csv_preview/src/table_data_engine.rs @@ -0,0 +1,90 @@ +//! This module defines core operations and config of tabular data view (CSV table) +//! It operates in 2 coordinate systems: +//! - `DataCellId` - indices of src data cells +//! - `DisplayCellId` - indices of data after applied transformations like sorting/filtering, which is used to render cell on the screen +//! +//! It's designed to contain core logic of operations without relying on `CsvPreviewView`, context or window handles. 
+ +use std::{collections::HashMap, sync::Arc}; + +use ui::table_row::TableRow; + +use crate::{ + table_data_engine::sorting_by_column::{AppliedSorting, sort_data_rows}, + types::{DataRow, DisplayRow, TableCell, TableLikeContent}, +}; + +pub mod sorting_by_column; + +#[derive(Default)] +pub(crate) struct TableDataEngine { + pub applied_sorting: Option, + d2d_mapping: DisplayToDataMapping, + pub contents: TableLikeContent, +} + +impl TableDataEngine { + pub(crate) fn d2d_mapping(&self) -> &DisplayToDataMapping { + &self.d2d_mapping + } + + pub(crate) fn apply_sort(&mut self) { + self.d2d_mapping + .apply_sorting(self.applied_sorting, &self.contents.rows); + self.d2d_mapping.merge_mappings(); + } + + /// Applies sorting and filtering to the data and produces display to data mapping + pub(crate) fn calculate_d2d_mapping(&mut self) { + self.d2d_mapping + .apply_sorting(self.applied_sorting, &self.contents.rows); + self.d2d_mapping.merge_mappings(); + } +} + +/// Relation of Display (rendered) rows to Data (src) rows with applied transformations +/// Transformations applied: +/// - sorting by column +#[derive(Debug, Default)] +pub struct DisplayToDataMapping { + /// All rows sorted, regardless of applied filtering. Applied every time sorting changes + pub sorted_rows: Vec, + /// Filtered and sorted rows. 
Computed cheaply from `sorted_rows`
top or on bottom) + let val_a = row_a + .get(sorting.col_idx) + .and_then(|tc| tc.display_value()) + .map(|tc| tc.as_str()) + .unwrap_or(""); + let val_b = row_b + .get(sorting.col_idx) + .and_then(|tc| tc.display_value()) + .map(|tc| tc.as_str()) + .unwrap_or(""); + + let cmp = val_a.cmp(val_b); + match sorting.direction { + SortDirection::Asc => cmp, + SortDirection::Desc => cmp.reverse(), + } + }); + + data_row_ids +} diff --git a/crates/csv_preview/src/types.rs b/crates/csv_preview/src/types.rs new file mode 100644 index 0000000000000000000000000000000000000000..87fc513f53e61db996d39dcb05409c765fd0c6dc --- /dev/null +++ b/crates/csv_preview/src/types.rs @@ -0,0 +1,17 @@ +use std::fmt::Debug; + +pub use coordinates::*; +mod coordinates; +pub use table_cell::*; +mod table_cell; +pub use table_like_content::*; +mod table_like_content; + +/// Line number information for CSV rows +#[derive(Debug, Clone, Copy)] +pub enum LineNumber { + /// Single line row + Line(usize), + /// Multi-line row spanning from start to end line. Incluisive + LineRange(usize, usize), +} diff --git a/crates/csv_preview/src/types/coordinates.rs b/crates/csv_preview/src/types/coordinates.rs new file mode 100644 index 0000000000000000000000000000000000000000..d800bef6ce0dd54d5ae65301163f79013e447ce3 --- /dev/null +++ b/crates/csv_preview/src/types/coordinates.rs @@ -0,0 +1,127 @@ +//! Type definitions for CSV table coordinates and cell identifiers. +//! +//! Provides newtypes for self-documenting coordinate systems: +//! - Display coordinates: Visual positions in rendered table +//! - Data coordinates: Original CSV data positions + +use std::ops::Deref; + +///// Rows ///// +/// Visual row position in rendered table. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct DisplayRow(pub usize); + +impl DisplayRow { + /// Create a new display row + pub fn new(row: usize) -> Self { + Self(row) + } + + /// Get the inner row value + pub fn get(self) -> usize { + self.0 + } +} + +impl Deref for DisplayRow { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// Original CSV row position. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct DataRow(pub usize); + +impl DataRow { + /// Create a new data row + pub fn new(row: usize) -> Self { + Self(row) + } +} + +impl Deref for DataRow { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for DisplayRow { + fn from(row: usize) -> Self { + DisplayRow::new(row) + } +} + +impl From for DataRow { + fn from(row: usize) -> Self { + DataRow::new(row) + } +} + +///// Columns ///// +/// Data column position in CSV table. 0-based +/// +/// Currently represents both display and data coordinate systems since +/// column reordering is not yet implemented. When column reordering is added, +/// this will need to be split into `DisplayColumn` and `DataColumn` types. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct AnyColumn(pub usize); + +impl AnyColumn { + /// Create a new column ID + pub fn new(col: usize) -> Self { + Self(col) + } + + /// Get the inner column value + pub fn get(self) -> usize { + self.0 + } +} + +impl Deref for AnyColumn { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for AnyColumn { + fn from(col: usize) -> Self { + AnyColumn::new(col) + } +} + +impl From for usize { + fn from(value: AnyColumn) -> Self { + *value + } +} + +///// Cells ///// +/// Visual cell position in rendered table. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct DisplayCellId { + pub row: DisplayRow, + pub col: AnyColumn, +} + +impl DisplayCellId { + /// Create a new display cell ID + pub fn new(row: impl Into, col: impl Into) -> Self { + Self { + row: row.into(), + col: col.into(), + } + } + + /// Returns (row, column) + pub fn to_raw(&self) -> (usize, usize) { + (self.row.0, self.col.0) + } +} diff --git a/crates/csv_preview/src/types/table_cell.rs b/crates/csv_preview/src/types/table_cell.rs new file mode 100644 index 0000000000000000000000000000000000000000..b6f9adb3fe82b0d468d1ffc8404e707a762e94ea --- /dev/null +++ b/crates/csv_preview/src/types/table_cell.rs @@ -0,0 +1,54 @@ +use text::Anchor; +use ui::SharedString; + +/// Position of a cell within the source CSV buffer +#[derive(Clone, Debug)] +pub struct CellContentSpan { + /// Start anchor of the cell content in the source buffer + pub start: Anchor, + /// End anchor of the cell content in the source buffer + pub end: Anchor, +} + +/// A table cell with its content and position in the source buffer +#[derive(Clone, Debug)] +pub enum TableCell { + /// Cell existing in the CSV + Real { + /// Position of this cell in the source buffer + position: CellContentSpan, + /// Cached display value (for performance) + cached_value: SharedString, + }, + /// Virtual cell, created to pad malformed row + Virtual, +} + +impl TableCell { + /// Create a TableCell with buffer position tracking + pub fn from_buffer_position( + content: SharedString, + start_offset: usize, + end_offset: usize, + buffer_snapshot: &text::BufferSnapshot, + ) -> Self { + let start_anchor = buffer_snapshot.anchor_before(start_offset); + let end_anchor = buffer_snapshot.anchor_after(end_offset); + + Self::Real { + position: CellContentSpan { + start: start_anchor, + end: end_anchor, + }, + cached_value: content, + } + } + + /// Get the display value for this cell + pub fn display_value(&self) -> Option<&SharedString> { + match self { + 
TableCell::Real { cached_value, .. } => Some(cached_value), + TableCell::Virtual => None, + } + } +} diff --git a/crates/csv_preview/src/types/table_like_content.rs b/crates/csv_preview/src/types/table_like_content.rs new file mode 100644 index 0000000000000000000000000000000000000000..7bf205af812c24d70f33157f8ab7acc454c3b0d5 --- /dev/null +++ b/crates/csv_preview/src/types/table_like_content.rs @@ -0,0 +1,32 @@ +use ui::table_row::TableRow; + +use crate::types::{DataRow, LineNumber, TableCell}; + +/// Generic container struct of table-like data (CSV, TSV, etc) +#[derive(Clone)] +pub struct TableLikeContent { + /// Number of data columns. + /// Defines table width used to validate `TableRow` on creation + pub number_of_cols: usize, + pub headers: TableRow, + pub rows: Vec>, + /// Follows the same indices as `rows` + pub line_numbers: Vec, +} + +impl Default for TableLikeContent { + fn default() -> Self { + Self { + number_of_cols: 0, + headers: TableRow::::from_vec(vec![], 0), + rows: vec![], + line_numbers: vec![], + } + } +} + +impl TableLikeContent { + pub(crate) fn get_row(&self, data_row: DataRow) -> Option<&TableRow> { + self.rows.get(*data_row) + } +} diff --git a/crates/debugger_ui/Cargo.toml b/crates/debugger_ui/Cargo.toml index fb79b1b0790b28d7204774720bf9c413cfed64e6..f95712b05129b7f86699f658c4c2c3effbd7d216 100644 --- a/crates/debugger_ui/Cargo.toml +++ b/crates/debugger_ui/Cargo.toml @@ -64,7 +64,6 @@ settings.workspace = true sysinfo.workspace = true task.workspace = true tasks_ui.workspace = true -telemetry.workspace = true terminal_view.workspace = true text.workspace = true theme.workspace = true diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 51b11a24923c11205b9845bb98b3f2692ebd0e3d..cac96918e32cde4770bedac69fb92a08825e3b25 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -35,6 +35,7 @@ use tree_sitter::{Query, StreamingIterator as _}; use 
ui::{ ContextMenu, Divider, PopoverMenu, PopoverMenuHandle, SplitButton, Tab, Tooltip, prelude::*, }; +use util::redact::redact_command; use util::rel_path::RelPath; use util::{ResultExt, debug_panic, maybe}; use workspace::SplitDirection; @@ -43,7 +44,7 @@ use workspace::{ Item, Pane, Workspace, dock::{DockPosition, Panel, PanelEvent}, }; -use zed_actions::ToggleFocus; +use zed_actions::debug_panel::ToggleFocus; pub struct DebuggerHistoryFeatureFlag; @@ -275,12 +276,13 @@ impl DebugPanel { async move |_, cx| { if let Err(error) = task.await { - log::error!("{error:#}"); + let redacted_error = redact_command(&format!("{error:#}")); + log::error!("{redacted_error}"); session .update(cx, |session, cx| { session .console_output(cx) - .unbounded_send(format!("error: {:#}", error)) + .unbounded_send(format!("error: {:#}", redacted_error)) .ok(); session.shutdown(cx) }) diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 3a70216ec743f78ebeaa98ad86d3c0dddba60efb..124967650b31cd88e72b2867838fb3a4ecbcf920 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -8,12 +8,12 @@ use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::Threa use schemars::JsonSchema; use serde::Deserialize; use session::DebugSession; -use stack_trace_view::StackTraceView; + use tasks_ui::{Spawn, TaskOverrides}; use ui::{FluentBuilder, InteractiveElement}; use util::maybe; -use workspace::{ItemHandle, ShutdownDebugAdapters, Workspace}; -use zed_actions::{Toggle, ToggleFocus}; +use workspace::{ShutdownDebugAdapters, Workspace}; +use zed_actions::debug_panel::{Toggle, ToggleFocus}; pub mod attach_modal; pub mod debugger_panel; @@ -21,7 +21,6 @@ mod dropdown_menus; mod new_process_modal; mod persistence; pub(crate) mod session; -mod stack_trace_view; #[cfg(any(test, feature = "test-support"))] pub mod tests; @@ -70,8 +69,6 @@ actions!( FocusLoadedSources, /// Focuses on the terminal panel. 
FocusTerminal, - /// Shows the stack trace for the current thread. - ShowStackTrace, /// Toggles the thread picker dropdown. ToggleThreadPicker, /// Toggles the session picker dropdown. @@ -207,39 +204,6 @@ pub fn init(cx: &mut App) { .ok(); } }) - .on_action(cx.listener( - |workspace, _: &ShowStackTrace, window, cx| { - let Some(debug_panel) = workspace.panel::(cx) else { - return; - }; - - if let Some(existing) = workspace.item_of_type::(cx) { - let is_active = workspace - .active_item(cx) - .is_some_and(|item| item.item_id() == existing.item_id()); - workspace.activate_item(&existing, true, !is_active, window, cx); - } else { - let Some(active_session) = debug_panel.read(cx).active_session() - else { - return; - }; - - let project = workspace.project(); - - let stack_trace_view = active_session.update(cx, |session, cx| { - session.stack_trace_view(project, window, cx).clone() - }); - - workspace.add_item_to_active_pane( - Box::new(stack_trace_view), - None, - true, - window, - cx, - ); - } - }, - )) }) .when(supports_detach, |div| { let active_item = active_item.clone(); diff --git a/crates/debugger_ui/src/session.rs b/crates/debugger_ui/src/session.rs index fc11f40e851a48ff9d0d4634eb69dd899b48a113..32d3e5992a87b99978b5cd0332576d6f5872aad6 100644 --- a/crates/debugger_ui/src/session.rs +++ b/crates/debugger_ui/src/session.rs @@ -1,14 +1,13 @@ pub mod running; -use crate::{StackTraceView, persistence::SerializedLayout, session::running::DebugTerminal}; +use crate::{persistence::SerializedLayout, session::running::DebugTerminal}; use dap::client::SessionId; use gpui::{App, Axis, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity}; use project::debugger::session::Session; -use project::worktree_store::WorktreeStore; + use project::{Project, debugger::session::SessionQuirks}; use rpc::proto; use running::RunningState; -use std::cell::OnceCell; use ui::prelude::*; use workspace::{ CollaboratorId, FollowableItem, ViewId, Workspace, @@ -19,9 +18,6 @@ pub 
struct DebugSession { remote_id: Option, pub(crate) running_state: Entity, pub(crate) quirks: SessionQuirks, - stack_trace_view: OnceCell>, - _worktree_store: WeakEntity, - workspace: WeakEntity, } impl DebugSession { @@ -49,13 +45,10 @@ impl DebugSession { }); let quirks = session.read(cx).quirks(); - cx.new(|cx| Self { + cx.new(|_| Self { remote_id: None, running_state, quirks, - stack_trace_view: OnceCell::new(), - _worktree_store: project.read(cx).worktree_store().downgrade(), - workspace, }) } @@ -63,30 +56,6 @@ impl DebugSession { self.running_state.read(cx).session_id() } - pub(crate) fn stack_trace_view( - &mut self, - project: &Entity, - window: &mut Window, - cx: &mut Context, - ) -> &Entity { - let workspace = self.workspace.clone(); - let running_state = self.running_state.clone(); - - self.stack_trace_view.get_or_init(|| { - let stackframe_list = running_state.read(cx).stack_frame_list().clone(); - - cx.new(|cx| { - StackTraceView::new( - workspace.clone(), - project.clone(), - stackframe_list, - window, - cx, - ) - }) - }) - } - pub fn session(&self, cx: &App) -> Entity { self.running_state.read(cx).session().clone() } diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 626c807ded5e0a01b086d7311cc083bab321c7f6..59e7226f596f1266fdeb3c5f3b60e1f97b81c850 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -356,11 +356,11 @@ pub(crate) fn new_debugger_pane( debug_assert!(_previous_subscription.is_none()); running .panes - .split(&this_pane, &new_pane, split_direction, cx)?; - anyhow::Ok(new_pane) + .split(&this_pane, &new_pane, split_direction, cx); + new_pane }); - match new_pane.and_then(|r| r) { + match new_pane { Ok(new_pane) => { move_item( &source, diff --git a/crates/debugger_ui/src/session/running/memory_view.rs b/crates/debugger_ui/src/session/running/memory_view.rs index 
f10e5179e37f87be0e27985b557fcb63cf089a42..69ea556018fdadeb1e270b1d7c2520d25752e670 100644 --- a/crates/debugger_ui/src/session/running/memory_view.rs +++ b/crates/debugger_ui/src/session/running/memory_view.rs @@ -133,7 +133,7 @@ impl ViewState { fn set_offset(&mut self, point: Point) { if point.y >= -Pixels::ZERO { self.schedule_scroll_up(); - } else if point.y <= -self.scroll_handle.max_offset().height { + } else if point.y <= -self.scroll_handle.max_offset().y { self.schedule_scroll_down(); } self.scroll_handle.set_offset(point); @@ -141,7 +141,7 @@ impl ViewState { } impl ScrollableHandle for ViewStateHandle { - fn max_offset(&self) -> gpui::Size { + fn max_offset(&self) -> gpui::Point { self.0.borrow().scroll_handle.max_offset() } diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index ccdfa22e89f449d2d40ae72f6b794b27ee6c8934..3e8a28a40bfc194413e0bf19d371a86609ba58c7 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -15,13 +15,13 @@ use util::{ paths::{PathStyle, is_absolute}, }; -use crate::{StackTraceView, ToggleUserFrames}; +use crate::ToggleUserFrames; use language::PointUtf16; use project::debugger::breakpoint_store::ActiveStackFrame; use project::debugger::session::{Session, SessionEvent, StackFrame, ThreadStatus}; use project::{ProjectItem, ProjectPath}; use ui::{Tooltip, WithScrollbar, prelude::*}; -use workspace::{ItemHandle, Workspace, WorkspaceId}; +use workspace::{Workspace, WorkspaceId}; use super::RunningState; @@ -154,6 +154,7 @@ impl StackFrameList { &self.entries } + #[cfg(test)] pub(crate) fn flatten_entries( &self, show_collapsed: bool, @@ -437,14 +438,7 @@ impl StackFrameList { .project_path(cx) .context("Could not select a stack frame for unnamed buffer")?; - let open_preview = !workspace - .item_of_type::(cx) - .map(|viewer| { - workspace - .active_item(cx) - 
.is_some_and(|item| item.item_id() == viewer.item_id()) - }) - .unwrap_or_default(); + let open_preview = true; let active_debug_line_pane = workspace .project() diff --git a/crates/debugger_ui/src/stack_trace_view.rs b/crates/debugger_ui/src/stack_trace_view.rs deleted file mode 100644 index 02d6e793f290863cc002101bba00ac7a46446bb7..0000000000000000000000000000000000000000 --- a/crates/debugger_ui/src/stack_trace_view.rs +++ /dev/null @@ -1,458 +0,0 @@ -use std::{ - any::{Any, TypeId}, - sync::Arc, -}; - -use collections::HashMap; -use dap::StackFrameId; -use editor::{ - Anchor, Bias, DebugStackFrameLine, Editor, EditorEvent, ExcerptId, ExcerptRange, HighlightKey, - MultiBuffer, RowHighlightOptions, SelectionEffects, ToPoint, scroll::Autoscroll, -}; -use gpui::{ - App, AppContext, Entity, EventEmitter, Focusable, IntoElement, Render, SharedString, - Subscription, Task, WeakEntity, Window, -}; -use language::{BufferSnapshot, Capability, Point, Selection, SelectionGoal, TreeSitterOptions}; -use project::{Project, ProjectPath}; -use ui::{ActiveTheme as _, Context, ParentElement as _, Styled as _, div}; -use util::ResultExt as _; -use workspace::{ - Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace, - item::{BreadcrumbText, ItemEvent, SaveOptions}, - searchable::SearchableItemHandle, -}; - -use crate::session::running::stack_frame_list::{StackFrameList, StackFrameListEvent}; -use anyhow::Result; - -pub(crate) struct StackTraceView { - editor: Entity, - multibuffer: Entity, - workspace: WeakEntity, - project: Entity, - stack_frame_list: Entity, - selected_stack_frame_id: Option, - highlights: Vec<(StackFrameId, Anchor)>, - excerpt_for_frames: collections::HashMap, - refresh_task: Option>>, - _subscription: Option, -} - -impl StackTraceView { - pub(crate) fn new( - workspace: WeakEntity, - project: Entity, - stack_frame_list: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Self { - telemetry::event!("Stack Trace View Deployed"); - - let 
multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let editor = cx.new(|cx| { - let mut editor = - Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx); - editor.set_vertical_scroll_margin(5, cx); - editor - }); - - cx.subscribe_in(&editor, window, |this, editor, event, window, cx| { - if let EditorEvent::SelectionsChanged { local: true } = event { - let excerpt_id = editor.update(cx, |editor, cx| { - let position: Point = editor - .selections - .newest(&editor.display_snapshot(cx)) - .head(); - - editor - .snapshot(window, cx) - .buffer_snapshot() - .excerpt_containing(position..position) - .map(|excerpt| excerpt.id()) - }); - - if let Some(stack_frame_id) = excerpt_id - .and_then(|id| this.excerpt_for_frames.get(&id)) - .filter(|id| Some(**id) != this.selected_stack_frame_id) - { - this.stack_frame_list.update(cx, |list, cx| { - list.go_to_stack_frame(*stack_frame_id, window, cx).detach(); - }); - } - } - }) - .detach(); - - cx.subscribe_in( - &stack_frame_list, - window, - |this, stack_frame_list, event, window, cx| match event { - StackFrameListEvent::BuiltEntries => { - this.selected_stack_frame_id = - stack_frame_list.read(cx).opened_stack_frame_id(); - this.update_excerpts(window, cx); - } - StackFrameListEvent::SelectedStackFrameChanged(selected_frame_id) => { - this.selected_stack_frame_id = Some(*selected_frame_id); - this.update_highlights(window, cx); - - if let Some(frame_anchor) = this - .highlights - .iter() - .find(|(frame_id, _)| frame_id == selected_frame_id) - .map(|highlight| highlight.1) - { - this.editor.update(cx, |editor, cx| { - if frame_anchor.excerpt_id - != editor.selections.newest_anchor().head().excerpt_id - { - let effects = SelectionEffects::scroll( - Autoscroll::center().for_anchor(frame_anchor), - ); - - editor.change_selections(effects, window, cx, |selections| { - let selection_id = selections.new_selection_id(); - - let selection = Selection { - id: selection_id, - start: 
frame_anchor, - end: frame_anchor, - goal: SelectionGoal::None, - reversed: false, - }; - - selections.select_anchors(vec![selection]); - }) - } - }); - } - } - }, - ) - .detach(); - - let mut this = Self { - editor, - multibuffer, - workspace, - project, - excerpt_for_frames: HashMap::default(), - highlights: Vec::default(), - stack_frame_list, - selected_stack_frame_id: None, - refresh_task: None, - _subscription: None, - }; - - this.update_excerpts(window, cx); - this - } - - fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context) { - self.refresh_task.take(); - self.editor.update(cx, |editor, cx| { - editor.clear_highlights(HighlightKey::DebugStackFrameLine, cx) - }); - - let stack_frames = self - .stack_frame_list - .read_with(cx, |list, _| list.flatten_entries(false, false)); - - let frames_to_open: Vec<_> = stack_frames - .into_iter() - .filter_map(|frame| { - Some(( - frame.id, - frame.line as u32 - 1, - StackFrameList::abs_path_from_stack_frame(&frame)?, - )) - }) - .collect(); - - self.multibuffer - .update(cx, |multi_buffer, cx| multi_buffer.clear(cx)); - - let task = cx.spawn_in(window, async move |this, cx| { - let mut to_highlights = Vec::default(); - - for (stack_frame_id, line, abs_path) in frames_to_open { - let (worktree, relative_path) = this - .update(cx, |this, cx| { - this.workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |this, cx| { - this.find_or_create_worktree(&abs_path, false, cx) - }) - }) - })?? - .await?; - - let project_path = ProjectPath { - worktree_id: worktree.read_with(cx, |tree, _| tree.id()), - path: relative_path, - }; - - if let Some(buffer) = this - .read_with(cx, |this, _| this.project.clone())? 
- .update(cx, |project, cx| project.open_buffer(project_path, cx)) - .await - .log_err() - { - this.update(cx, |this, cx| { - this.multibuffer.update(cx, |multi_buffer, cx| { - let line_point = Point::new(line, 0); - let start_context = Self::heuristic_syntactic_expand( - &buffer.read(cx).snapshot(), - line_point, - ); - - // Users will want to see what happened before an active debug line in most cases - let range = ExcerptRange { - context: start_context..Point::new(line.saturating_add(1), 0), - primary: line_point..line_point, - }; - multi_buffer.push_excerpts(buffer.clone(), vec![range], cx); - - let line_anchor = - multi_buffer.buffer_point_to_anchor(&buffer, line_point, cx); - - if let Some(line_anchor) = line_anchor { - this.excerpt_for_frames - .insert(line_anchor.excerpt_id, stack_frame_id); - to_highlights.push((stack_frame_id, line_anchor)); - } - }); - }) - .ok(); - } - } - - this.update_in(cx, |this, window, cx| { - this.highlights = to_highlights; - this.update_highlights(window, cx); - }) - .ok(); - - anyhow::Ok(()) - }); - - self.refresh_task = Some(task); - } - - fn update_highlights(&mut self, window: &mut Window, cx: &mut Context) { - self.editor.update(cx, |editor, _| { - editor.clear_row_highlights::() - }); - - let stack_frames = self - .stack_frame_list - .read_with(cx, |session, _| session.flatten_entries(false, false)); - - let active_idx = self - .selected_stack_frame_id - .and_then(|id| { - stack_frames - .iter() - .enumerate() - .find_map(|(idx, frame)| if frame.id == id { Some(idx) } else { None }) - }) - .unwrap_or(0); - - self.editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(window, cx).display_snapshot; - let first_color = cx.theme().colors().editor_debugger_active_line_background; - - let color = first_color.opacity(0.5); - - let mut is_first = true; - - for (_, highlight) in self.highlights.iter().skip(active_idx) { - let position = highlight.to_point(&snapshot.buffer_snapshot()); - let color = if is_first { - 
is_first = false; - first_color - } else { - color - }; - - let start = snapshot - .buffer_snapshot() - .clip_point(Point::new(position.row, 0), Bias::Left); - let end = start + Point::new(1, 0); - let start = snapshot.buffer_snapshot().anchor_before(start); - let end = snapshot.buffer_snapshot().anchor_before(end); - editor.highlight_rows::( - start..end, - color, - RowHighlightOptions::default(), - cx, - ); - } - }) - } - - fn heuristic_syntactic_expand(snapshot: &BufferSnapshot, selected_point: Point) -> Point { - let mut text_objects = snapshot.text_object_ranges( - selected_point..selected_point, - TreeSitterOptions::max_start_depth(4), - ); - - let mut start_position = text_objects - .find(|(_, obj)| matches!(obj, language::TextObject::AroundFunction)) - .map(|(range, _)| snapshot.offset_to_point(range.start)) - .map(|point| Point::new(point.row.max(selected_point.row.saturating_sub(8)), 0)) - .unwrap_or(selected_point); - - if start_position.row == selected_point.row { - start_position.row = start_position.row.saturating_sub(1); - } - - start_position - } -} - -impl Render for StackTraceView { - fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { - div().size_full().child(self.editor.clone()) - } -} - -impl EventEmitter for StackTraceView {} -impl Focusable for StackTraceView { - fn focus_handle(&self, cx: &App) -> gpui::FocusHandle { - self.editor.focus_handle(cx) - } -} - -impl Item for StackTraceView { - type Event = EditorEvent; - - fn to_item_events(event: &EditorEvent, f: &mut dyn FnMut(ItemEvent)) { - Editor::to_item_events(event, f) - } - - fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { - self.editor - .update(cx, |editor, cx| editor.deactivated(window, cx)); - } - - fn navigate( - &mut self, - data: Arc, - window: &mut Window, - cx: &mut Context, - ) -> bool { - self.editor - .update(cx, |editor, cx| editor.navigate(data, window, cx)) - } - - fn tab_tooltip_text(&self, _: &App) -> Option { - Some("Stack 
Frame Viewer".into()) - } - - fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString { - "Stack Frames".into() - } - - fn for_each_project_item( - &self, - cx: &App, - f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), - ) { - self.editor.for_each_project_item(cx, f) - } - - fn set_nav_history( - &mut self, - nav_history: ItemNavHistory, - _: &mut Window, - cx: &mut Context, - ) { - self.editor.update(cx, |editor, _| { - editor.set_nav_history(Some(nav_history)); - }); - } - - fn is_dirty(&self, cx: &App) -> bool { - self.multibuffer.read(cx).is_dirty(cx) - } - - fn has_deleted_file(&self, cx: &App) -> bool { - self.multibuffer.read(cx).has_deleted_file(cx) - } - - fn has_conflict(&self, cx: &App) -> bool { - self.multibuffer.read(cx).has_conflict(cx) - } - - fn can_save(&self, _: &App) -> bool { - true - } - - fn save( - &mut self, - options: SaveOptions, - project: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.editor.save(options, project, window, cx) - } - - fn save_as( - &mut self, - _: Entity, - _: ProjectPath, - _window: &mut Window, - _: &mut Context, - ) -> Task> { - unreachable!() - } - - fn reload( - &mut self, - project: Entity, - window: &mut Window, - cx: &mut Context, - ) -> Task> { - self.editor.reload(project, window, cx) - } - - fn act_as_type<'a>( - &'a self, - type_id: TypeId, - self_handle: &'a Entity, - _: &'a App, - ) -> Option { - if type_id == TypeId::of::() { - Some(self_handle.clone().into()) - } else if type_id == TypeId::of::() { - Some(self.editor.clone().into()) - } else { - None - } - } - - fn as_searchable(&self, _: &Entity, _: &App) -> Option> { - Some(Box::new(self.editor.clone())) - } - - fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { - ToolbarItemLocation::PrimaryLeft - } - - fn breadcrumbs(&self, cx: &App) -> Option> { - self.editor.breadcrumbs(cx) - } - - fn added_to_workspace( - &mut self, - workspace: &mut Workspace, - window: &mut Window, - cx: &mut Context, 
- ) { - self.editor.update(cx, |editor, cx| { - editor.added_to_workspace(workspace, window, cx) - }); - } -} diff --git a/crates/dev_container/src/lib.rs b/crates/dev_container/src/lib.rs index 908be691a7ace8d5a7b64e73233f252e2f964a2b..7fcacf8004bef6c4c26e2751df6f26c02b4629ce 100644 --- a/crates/dev_container/src/lib.rs +++ b/crates/dev_container/src/lib.rs @@ -300,14 +300,20 @@ impl PickerDelegate for TemplatePickerDelegate { ) { let fun = &mut self.on_confirm; + if self.matching_indices.is_empty() { + return; + } self.stateful_modal .update(cx, |modal, cx| { - fun( - self.candidate_templates[self.matching_indices[self.selected_index]].clone(), - modal, - window, - cx, - ); + let Some(confirmed_entry) = self + .matching_indices + .get(self.selected_index) + .and_then(|ix| self.candidate_templates.get(*ix)) + else { + log::error!("Selected index not in range of known matches"); + return; + }; + fun(confirmed_entry.clone(), modal, window, cx); }) .ok(); } @@ -476,7 +482,17 @@ impl PickerDelegate for FeaturePickerDelegate { }) .ok(); } else { - let current = &mut self.candidate_features[self.matching_indices[self.selected_index]]; + if self.matching_indices.is_empty() { + return; + } + let Some(current) = self + .matching_indices + .get(self.selected_index) + .and_then(|ix| self.candidate_features.get_mut(*ix)) + else { + log::error!("Selected index not in range of matches"); + return; + }; current.toggle_state = match current.toggle_state { ToggleState::Selected => { self.template_entry diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 6df124c48472134b50b85241511f16f93cc4ad08..57ce6f03d2b56c9441bee763a28dcc7010f8311e 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -322,16 +322,14 @@ impl ProjectDiagnosticsEditor { if !has_no_blocks { continue; } - let is_dirty = self - .multibuffer - .read(cx) - .buffer(buffer_id) - .is_none_or(|buffer| buffer.read(cx).is_dirty()); - if 
is_dirty { + let Some(buffer) = self.multibuffer.read(cx).buffer(buffer_id) else { + continue; + }; + if buffer.read(cx).is_dirty() { continue; } self.multibuffer.update(cx, |b, cx| { - b.remove_excerpts_for_buffer(buffer_id, cx); + b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx); }); } } diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index 6ef599542a5b2f511915d7435af192162a5dbd3b..43efbeea0b0310cf70cd9bdb560b1b0d2b0c14ef 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -578,6 +578,7 @@ fn handle_postprocessing() -> Result<()> { .expect("Default title not a string") .to_string(); let amplitude_key = std::env::var("DOCS_AMPLITUDE_API_KEY").unwrap_or_default(); + let consent_io_instance = std::env::var("DOCS_CONSENT_IO_INSTANCE").unwrap_or_default(); output.insert("html".to_string(), zed_html); mdbook::Renderer::render(&mdbook::renderer::HtmlHandlebars::new(), &ctx)?; @@ -647,6 +648,7 @@ fn handle_postprocessing() -> Result<()> { zlog::trace!(logger => "Updating {:?}", pretty_path(&file, &root_dir)); let contents = contents.replace("#description#", meta_description); let contents = contents.replace("#amplitude_key#", &litude_key); + let contents = contents.replace("#consent_io_instance#", &consent_io_instance); let contents = title_regex() .replace(&contents, |_: ®ex::Captures| { format!("{}", meta_title) diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index ace898fb6004668fbde916ab4b0447d8e5b8a553..9f867584b57c8aed86f7003cca3a2b034c184476 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -21,6 +21,7 @@ arrayvec.workspace = true brotli.workspace = true buffer_diff.workspace = true client.workspace = true +clock.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true diff --git a/crates/edit_prediction/src/cursor_excerpt.rs 
b/crates/edit_prediction/src/cursor_excerpt.rs index 18a13130720ca2a5bd313f94437a8c3cfab3c691..690e7001bd45ab3d9a995b4dfd43c2e8e297dbe9 100644 --- a/crates/edit_prediction/src/cursor_excerpt.rs +++ b/crates/edit_prediction/src/cursor_excerpt.rs @@ -13,6 +13,7 @@ pub fn compute_excerpt_ranges( let editable_150 = compute_editable_range(snapshot, position, 150); let editable_180 = compute_editable_range(snapshot, position, 180); let editable_350 = compute_editable_range(snapshot, position, 350); + let editable_512 = compute_editable_range(snapshot, position, 512); let editable_150_context_350 = expand_context_syntactically_then_linewise(snapshot, editable_150.clone(), 350); @@ -20,17 +21,20 @@ pub fn compute_excerpt_ranges( expand_context_syntactically_then_linewise(snapshot, editable_180.clone(), 350); let editable_350_context_150 = expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 150); + let editable_350_context_512 = + expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 512); + let editable_350_context_1024 = + expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 1024); + let context_4096 = expand_context_syntactically_then_linewise( + snapshot, + editable_350_context_1024.clone(), + 4096 - 1024, + ); + let context_8192 = + expand_context_syntactically_then_linewise(snapshot, context_4096.clone(), 8192 - 4096); - let full_start_row = editable_150_context_350 - .start - .row - .min(editable_180_context_350.start.row) - .min(editable_350_context_150.start.row); - let full_end_row = editable_150_context_350 - .end - .row - .max(editable_180_context_350.end.row) - .max(editable_350_context_150.end.row); + let full_start_row = context_8192.start.row; + let full_end_row = context_8192.end.row; let full_context = Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row)); @@ -47,9 +51,14 @@ pub fn compute_excerpt_ranges( editable_150: to_offset(&editable_150), 
editable_180: to_offset(&editable_180), editable_350: to_offset(&editable_350), + editable_512: Some(to_offset(&editable_512)), editable_150_context_350: to_offset(&editable_150_context_350), editable_180_context_350: to_offset(&editable_180_context_350), editable_350_context_150: to_offset(&editable_350_context_150), + editable_350_context_512: Some(to_offset(&editable_350_context_512)), + editable_350_context_1024: Some(to_offset(&editable_350_context_1024)), + context_4096: Some(to_offset(&context_4096)), + context_8192: Some(to_offset(&context_8192)), }; (full_context, full_context_offset_range, ranges) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index fb6af292fca3b610b5344da146fba558380ad22f..d73fdc9b39e350ff0697cdb5cdf1ec7d0c866a72 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1,7 +1,7 @@ use anyhow::Result; use arrayvec::ArrayVec; use client::{Client, EditPredictionUsage, UserStore}; -use cloud_api_types::SubmitEditPredictionFeedbackBody; +use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody}; use cloud_llm_client::predict_edits_v3::{ PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse, }; @@ -40,7 +40,7 @@ use settings::{ }; use std::collections::{VecDeque, hash_map}; use std::env; -use text::Edit; +use text::{AnchorRangeExt, Edit}; use workspace::Workspace; use zeta_prompt::{ZetaFormat, ZetaPromptInput}; @@ -69,12 +69,14 @@ pub mod sweep_ai; pub mod udiff; mod capture_example; +pub mod open_ai_compatible; mod zed_edit_prediction_delegate; pub mod zeta; #[cfg(test)] mod edit_prediction_tests; +use crate::example_spec::ExampleSpec; use crate::license_detection::LicenseDetectionWatcher; use crate::mercury::Mercury; use crate::onboarding_modal::ZedPredictModal; @@ -103,15 +105,14 @@ const CHANGE_GROUPING_LINE_SPAN: u32 = 8; const LAST_CHANGE_GROUPING_TIME: Duration = 
Duration::from_secs(1); const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice"; const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15); +const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled"; +const EDIT_PREDICTION_SETTLED_TTL: Duration = Duration::from_secs(60 * 5); +const EDIT_PREDICTION_SETTLED_QUIESCENCE: Duration = Duration::from_secs(10); -pub struct Zeta2FeatureFlag; +pub struct EditPredictionJumpsFeatureFlag; -impl FeatureFlag for Zeta2FeatureFlag { - const NAME: &'static str = "zeta2"; - - fn enabled_for_staff() -> bool { - true - } +impl FeatureFlag for EditPredictionJumpsFeatureFlag { + const NAME: &'static str = "edit_prediction_jumps"; } #[derive(Clone)] @@ -125,6 +126,7 @@ impl Global for EditPredictionStoreGlobal {} #[derive(Clone)] pub struct Zeta2RawConfig { pub model_id: Option, + pub environment: Option, pub format: ZetaFormat, } @@ -133,22 +135,32 @@ pub struct EditPredictionStore { user_store: Entity, llm_token: LlmApiToken, _llm_token_subscription: Subscription, + _fetch_experiments_task: Task<()>, projects: HashMap, update_required: bool, edit_prediction_model: EditPredictionModel, zeta2_raw_config: Option, + preferred_experiment: Option, + available_experiments: Vec, pub sweep_ai: SweepAi, pub mercury: Mercury, data_collection_choice: DataCollectionChoice, - reject_predictions_tx: mpsc::UnboundedSender, + reject_predictions_tx: mpsc::UnboundedSender, + settled_predictions_tx: mpsc::UnboundedSender, shown_predictions: VecDeque, rated_predictions: HashSet, + #[cfg(test)] + settled_event_callback: Option>, +} + +pub(crate) struct EditPredictionRejectionPayload { + rejection: EditPredictionRejection, + organization_id: Option, } #[derive(Copy, Clone, PartialEq, Eq)] pub enum EditPredictionModel { - Zeta1, - Zeta2, + Zeta, Fim { format: EditPredictionPromptFormat }, Sweep, Mercury, @@ -166,6 +178,8 @@ pub struct EditPredictionModelInput { trigger: PredictEditsRequestTrigger, 
diagnostic_search_range: Range, debug_tx: Option>, + can_collect_data: bool, + is_open_source: bool, pub user_actions: Vec, } @@ -360,6 +374,7 @@ impl ProjectState { prediction_id, EditPredictionRejectReason::Canceled, false, + None, cx, ); }) @@ -480,9 +495,19 @@ impl std::ops::Deref for BufferEditPrediction<'_> { } } +#[derive(Clone)] +struct PendingSettledPrediction { + request_id: EditPredictionId, + editable_anchor_range: Range, + example: Option, + enqueued_at: Instant, + last_edit_at: Instant, +} + struct RegisteredBuffer { file: Option>, snapshot: TextBufferSnapshot, + pending_predictions: Vec, last_position: Option, _subscriptions: [gpui::Subscription; 2], } @@ -674,33 +699,61 @@ impl EditPredictionStore { }) .detach(); + let (settled_predictions_tx, settled_predictions_rx) = mpsc::unbounded(); + cx.spawn(async move |this, cx| { + Self::run_settled_predictions_worker(this, settled_predictions_rx, cx).await; + }) + .detach(); + + let mut current_user = user_store.read(cx).watch_current_user(); + let fetch_experiments_task = cx.spawn(async move |this, cx| { + while current_user.borrow().is_none() { + current_user.next().await; + } + this.update(cx, |this, cx| { + this.refresh_available_experiments(cx); + }) + .log_err(); + }); + let this = Self { projects: HashMap::default(), client, user_store, llm_token, + _fetch_experiments_task: fetch_experiments_task, _llm_token_subscription: cx.subscribe( &refresh_llm_token_listener, |this, _listener, _event, cx| { let client = this.client.clone(); let llm_token = this.llm_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); cx.spawn(async move |_this, _cx| { - llm_token.refresh(&client).await?; + llm_token.refresh(&client, organization_id).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); }, ), update_required: false, - edit_prediction_model: EditPredictionModel::Zeta2, + edit_prediction_model: EditPredictionModel::Zeta, 
zeta2_raw_config: Self::zeta2_raw_config_from_env(), + preferred_experiment: None, + available_experiments: Vec::new(), sweep_ai: SweepAi::new(cx), mercury: Mercury::new(cx), data_collection_choice, reject_predictions_tx: reject_tx, + settled_predictions_tx, rated_predictions: Default::default(), shown_predictions: Default::default(), + #[cfg(test)] + settled_event_callback: None, }; this @@ -710,7 +763,12 @@ impl EditPredictionStore { let version_str = env::var("ZED_ZETA_FORMAT").ok()?; let format = ZetaFormat::parse(&version_str).ok()?; let model_id = env::var("ZED_ZETA_MODEL").ok(); - Some(Zeta2RawConfig { model_id, format }) + let environment = env::var("ZED_ZETA_ENVIRONMENT").ok(); + Some(Zeta2RawConfig { + model_id, + environment, + format, + }) } pub fn set_edit_prediction_model(&mut self, model: EditPredictionModel) { @@ -725,6 +783,75 @@ impl EditPredictionStore { self.zeta2_raw_config.as_ref() } + pub fn preferred_experiment(&self) -> Option<&str> { + self.preferred_experiment.as_deref() + } + + pub fn set_preferred_experiment(&mut self, experiment: Option) { + self.preferred_experiment = experiment; + } + + pub fn available_experiments(&self) -> &[String] { + &self.available_experiments + } + + pub fn active_experiment(&self) -> Option<&str> { + self.preferred_experiment.as_deref().or_else(|| { + self.shown_predictions + .iter() + .find_map(|p| p.model_version.as_ref()) + .and_then(|model_version| model_version.strip_prefix("zeta2:")) + }) + } + + pub fn refresh_available_experiments(&mut self, cx: &mut Context) { + let client = self.client.clone(); + let llm_token = self.llm_token.clone(); + let app_version = AppVersion::global(cx); + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + + cx.spawn(async move |this, cx| { + let experiments = cx + .background_spawn(async move { + let http_client = client.http_client(); + let token = llm_token.acquire(&client, 
organization_id).await?; + let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?; + let request = http_client::Request::builder() + .method(Method::GET) + .uri(url.as_ref()) + .header("Authorization", format!("Bearer {}", token)) + .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) + .body(Default::default())?; + let mut response = http_client.send(request).await?; + if response.status().is_success() { + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + let experiments: Vec = serde_json::from_slice(&body)?; + Ok(experiments) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!( + "Failed to fetch experiments: {:?}\nBody: {}", + response.status(), + body + ); + } + }) + .await?; + this.update(cx, |this, cx| { + this.available_experiments = experiments; + cx.notify(); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + pub fn icons(&self, cx: &App) -> edit_prediction_types::EditPredictionIconSet { use ui::IconName; match self.edit_prediction_model { @@ -738,7 +865,7 @@ impl EditPredictionStore { EditPredictionModel::Mercury => { edit_prediction_types::EditPredictionIconSet::new(IconName::Inception) } - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { edit_prediction_types::EditPredictionIconSet::new(IconName::ZedPredict) .with_disabled(IconName::ZedPredictDisabled) .with_up(IconName::ZedPredictUp) @@ -867,10 +994,7 @@ impl EditPredictionStore { } pub fn usage(&self, cx: &App) -> Option { - if matches!( - self.edit_prediction_model, - EditPredictionModel::Zeta2 | EditPredictionModel::Zeta1 - ) { + if matches!(self.edit_prediction_model, EditPredictionModel::Zeta) { self.user_store.read(cx).edit_prediction_usage() } else { None @@ -1035,7 +1159,7 @@ impl EditPredictionStore { } } project::Event::DiagnosticsUpdated { .. 
} => { - if cx.has_flag::() { + if cx.has_flag::() { self.refresh_prediction_from_diagnostics( project, DiagnosticSearchScope::Global, @@ -1089,6 +1213,7 @@ impl EditPredictionStore { snapshot, file, last_position: None, + pending_predictions: Vec::new(), _subscriptions: [ cx.subscribe(buffer, { let project = project.downgrade(); @@ -1137,6 +1262,7 @@ impl EditPredictionStore { let mut total_inserted = 0usize; let mut edit_range: Option> = None; let mut last_offset: Option = None; + let now = cx.background_executor().now(); for (edit, anchor_range) in new_snapshot.anchored_edits_since::(&old_snapshot.version) @@ -1155,6 +1281,12 @@ impl EditPredictionStore { return; }; + for pending_prediction in &mut registered_buffer.pending_predictions { + if edit_range.overlaps(&pending_prediction.editable_anchor_range, &new_snapshot) { + pending_prediction.last_edit_at = now; + } + } + let action_type = match (total_deleted, total_inserted, num_edits) { (0, ins, n) if ins == n => UserActionType::InsertChar, (0, _, _) => UserActionType::InsertSelection, @@ -1181,7 +1313,6 @@ impl EditPredictionStore { let events = &mut project_state.events; - let now = cx.background_executor().now(); if let Some(last_event) = project_state.last_event.as_mut() { let is_next_snapshot_of_same_buffer = old_snapshot.remote_id() == last_event.new_snapshot.remote_id() @@ -1312,7 +1443,7 @@ impl EditPredictionStore { cx, ); } - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { let is_cloud = !matches!( all_language_settings(None, cx).edit_predictions.provider, EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi @@ -1326,7 +1457,7 @@ impl EditPredictionStore { } async fn handle_rejected_predictions( - rx: UnboundedReceiver, + rx: UnboundedReceiver, client: Arc, llm_token: LlmApiToken, app_version: Version, @@ -1335,7 +1466,11 @@ impl EditPredictionStore { let mut rx = std::pin::pin!(rx.peekable()); let mut batched = Vec::new(); - while 
let Some(rejection) = rx.next().await { + while let Some(EditPredictionRejectionPayload { + rejection, + organization_id, + }) = rx.next().await + { batched.push(rejection); if batched.len() < MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST / 2 { @@ -1373,6 +1508,7 @@ impl EditPredictionStore { }, client.clone(), llm_token.clone(), + organization_id, app_version.clone(), true, ) @@ -1384,6 +1520,120 @@ impl EditPredictionStore { } } + async fn run_settled_predictions_worker( + this: WeakEntity, + mut rx: UnboundedReceiver, + cx: &mut AsyncApp, + ) { + let mut next_wake_time: Option = None; + loop { + let now = cx.background_executor().now(); + if let Some(wake_time) = next_wake_time.take() { + cx.background_executor() + .timer(wake_time.duration_since(now)) + .await; + } else { + let Some(new_enqueue_time) = rx.next().await else { + break; + }; + next_wake_time = Some(new_enqueue_time + EDIT_PREDICTION_SETTLED_QUIESCENCE); + while rx.next().now_or_never().flatten().is_some() {} + continue; + } + + let Some(this) = this.upgrade() else { + break; + }; + + let now = cx.background_executor().now(); + + let mut oldest_edited_at = None; + + this.update(cx, |this, _| { + for (_, project_state) in this.projects.iter_mut() { + for (_, registered_buffer) in project_state.registered_buffers.iter_mut() { + registered_buffer + .pending_predictions + .retain_mut(|pending_prediction| { + let age = + now.saturating_duration_since(pending_prediction.enqueued_at); + if age >= EDIT_PREDICTION_SETTLED_TTL { + return false; + } + + let quiet_for = + now.saturating_duration_since(pending_prediction.last_edit_at); + if quiet_for >= EDIT_PREDICTION_SETTLED_QUIESCENCE { + let settled_editable_region = registered_buffer + .snapshot + .text_for_range( + pending_prediction.editable_anchor_range.clone(), + ) + .collect::(); + + #[cfg(test)] + if let Some(callback) = &this.settled_event_callback { + callback( + pending_prediction.request_id.clone(), + settled_editable_region.clone(), + ); + } + + 
telemetry::event!( + EDIT_PREDICTION_SETTLED_EVENT, + request_id = pending_prediction.request_id.0.clone(), + settled_editable_region, + example = pending_prediction.example.take(), + ); + + return false; + } + + if oldest_edited_at + .is_none_or(|t| pending_prediction.last_edit_at < t) + { + oldest_edited_at = Some(pending_prediction.last_edit_at); + } + + true + }); + } + } + }); + + next_wake_time = oldest_edited_at.map(|t| t + EDIT_PREDICTION_SETTLED_QUIESCENCE); + } + } + + pub(crate) fn enqueue_settled_prediction( + &mut self, + request_id: EditPredictionId, + project: &Entity, + edited_buffer: &Entity, + edited_buffer_snapshot: &BufferSnapshot, + editable_offset_range: Range, + example: Option, + cx: &mut Context, + ) { + let this = &mut *self; + let project_state = this.get_or_init_project(project, cx); + if let Some(buffer) = project_state + .registered_buffers + .get_mut(&edited_buffer.entity_id()) + { + let now = cx.background_executor().now(); + buffer.pending_predictions.push(PendingSettledPrediction { + request_id: request_id, + editable_anchor_range: edited_buffer_snapshot + .anchor_range_around(editable_offset_range), + example, + enqueued_at: now, + last_edit_at: now, + }); + this.settled_predictions_tx.unbounded_send(now).ok(); + } + } + fn reject_current_prediction( &mut self, reason: EditPredictionRejectReason, @@ -1393,7 +1643,14 @@ impl EditPredictionStore { if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { project_state.pending_predictions.clear(); if let Some(prediction) = project_state.current_prediction.take() { - self.reject_prediction(prediction.prediction.id, reason, prediction.was_shown, cx); + let model_version = prediction.prediction.model_version.clone(); + self.reject_prediction( + prediction.prediction.id, + reason, + prediction.was_shown, + model_version, + cx, + ); } }; } @@ -1452,20 +1709,32 @@ impl EditPredictionStore { prediction_id: EditPredictionId, reason: EditPredictionRejectReason, was_shown: 
bool, + model_version: Option, cx: &App, ) { match self.edit_prediction_model { - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { let is_cloud = !matches!( all_language_settings(None, cx).edit_predictions.provider, EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi ); + if is_cloud { + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + self.reject_predictions_tx - .unbounded_send(EditPredictionRejection { - request_id: prediction_id.to_string(), - reason, - was_shown, + .unbounded_send(EditPredictionRejectionPayload { + rejection: EditPredictionRejection { + request_id: prediction_id.to_string(), + reason, + was_shown, + model_version, + }, + organization_id, }) .log_err(); } @@ -1547,6 +1816,9 @@ impl EditPredictionStore { // Prefer predictions from buffer if project_state.current_prediction.is_some() { + log::debug!( + "edit_prediction: diagnostic refresh skipped, current prediction already exists" + ); return; } @@ -1684,7 +1956,6 @@ fn is_ep_store_provider(provider: EditPredictionProvider) -> bool { | EditPredictionProvider::Experimental(_) => true, EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral => false, } } @@ -1725,7 +1996,6 @@ impl EditPredictionStore { EditPredictionProvider::OpenAiCompatibleApi => (false, 2), EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral => { log::error!("queue_prediction_refresh called with non-store provider"); return; @@ -1737,31 +2007,49 @@ impl EditPredictionStore { let project_state = self.get_or_init_project(&project, cx); let pending_prediction_id = project_state.next_pending_prediction_id; project_state.next_pending_prediction_id += 1; - let last_request = *select_throttle(project_state, 
request_trigger); + let throttle_at_enqueue = *select_throttle(project_state, request_trigger); let task = cx.spawn(async move |this, cx| { - if let Some(timeout) = last_request.and_then(|(last_entity, last_timestamp)| { - if throttle_entity != last_entity { - return None; - } - (last_timestamp + throttle_timeout).checked_duration_since(Instant::now()) - }) { + let throttle_wait = this + .update(cx, |this, cx| { + let project_state = this.get_or_init_project(&project, cx); + let throttle = *select_throttle(project_state, request_trigger); + + throttle.and_then(|(last_entity, last_timestamp)| { + if throttle_entity != last_entity { + return None; + } + (last_timestamp + throttle_timeout).checked_duration_since(Instant::now()) + }) + }) + .ok() + .flatten(); + + if let Some(timeout) = throttle_wait { cx.background_executor().timer(timeout).await; } // If this task was cancelled before the throttle timeout expired, - // do not perform a request. + // do not perform a request. Also skip if another task already + // proceeded since we were enqueued (duplicate). 
let mut is_cancelled = true; this.update(cx, |this, cx| { let project_state = this.get_or_init_project(&project, cx); let was_cancelled = project_state .cancelled_predictions .remove(&pending_prediction_id); - if !was_cancelled { - let new_refresh = (throttle_entity, Instant::now()); - *select_throttle(project_state, request_trigger) = Some(new_refresh); - is_cancelled = false; + if was_cancelled { + return; } + + // Another request has been already sent since this was enqueued + if *select_throttle(project_state, request_trigger) != throttle_at_enqueue { + return; + } + + let new_refresh = (throttle_entity, Instant::now()); + *select_throttle(project_state, request_trigger) = Some(new_refresh); + is_cancelled = false; }) .ok(); if is_cancelled { @@ -1811,6 +2099,7 @@ impl EditPredictionStore { new_prediction.prediction.id, EditPredictionRejectReason::CurrentPreferred, false, + new_prediction.prediction.model_version, cx, ); None @@ -1820,7 +2109,13 @@ impl EditPredictionStore { } } Err(reject_reason) => { - this.reject_prediction(prediction_result.id, reject_reason, false, cx); + this.reject_prediction( + prediction_result.id, + reject_reason, + false, + None, + cx, + ); None } } @@ -1882,7 +2177,7 @@ impl EditPredictionStore { active_buffer.clone(), position, trigger, - cx.has_flag::(), + cx.has_flag::(), cx, ) } @@ -1901,7 +2196,7 @@ impl EditPredictionStore { let stored_events = project_state.events(cx); let has_events = !stored_events.is_empty(); let events: Vec> = - stored_events.into_iter().map(|e| e.event).collect(); + stored_events.iter().map(|e| e.event.clone()).collect(); let debug_tx = project_state.debug_tx.clone(); let snapshot = active_buffer.read(cx).snapshot(); @@ -1935,33 +2230,41 @@ impl EditPredictionStore { let related_files = self.context_for_project(&project, cx); + let is_open_source = snapshot + .file() + .map_or(false, |file| self.is_file_open_source(&project, file, cx)) + && events.iter().all(|event| event.in_open_source_repo()) + && 
related_files.iter().all(|file| file.in_open_source_repo); + + let can_collect_data = !cfg!(test) + && is_open_source + && self.is_data_collection_enabled(cx) + && matches!(self.edit_prediction_model, EditPredictionModel::Zeta); + + let recent_paths = project_state.recent_paths.clone(); + let inputs = EditPredictionModelInput { project: project.clone(), buffer: active_buffer, - snapshot: snapshot, + snapshot, position, events, related_files, - recent_paths: project_state.recent_paths.clone(), + recent_paths, trigger, diagnostic_search_range: diagnostic_search_range, debug_tx, user_actions, + can_collect_data, + is_open_source, }; + let capture_data = (can_collect_data && rand::random_ratio(1, 1000)).then(|| stored_events); + let task = match self.edit_prediction_model { - EditPredictionModel::Zeta1 => zeta::request_prediction_with_zeta( - self, - inputs, - Some(zeta_prompt::EditPredictionModelKind::Zeta1), - cx, - ), - EditPredictionModel::Zeta2 => zeta::request_prediction_with_zeta( - self, - inputs, - Some(zeta_prompt::EditPredictionModelKind::Zeta2), - cx, - ), + EditPredictionModel::Zeta => { + zeta::request_prediction_with_zeta(self, inputs, capture_data, cx) + } EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx), EditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(inputs, cx), EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), @@ -1970,7 +2273,13 @@ impl EditPredictionStore { cx.spawn(async move |this, cx| { let prediction = task.await?; - if prediction.is_none() && allow_jump && has_events { + // Only fall back to diagnostics-based prediction if we got a + // the model had nothing to suggest for the buffer + if prediction.is_none() + && allow_jump + && has_events + && !matches!(trigger, PredictEditsRequestTrigger::Diagnostics) + { this.update(cx, |this, cx| { this.refresh_prediction_from_diagnostics( project, @@ -2100,6 +2409,7 @@ impl EditPredictionStore { client: Arc, 
custom_url: Option>, llm_token: LlmApiToken, + organization_id: Option, app_version: Version, ) -> Result<(RawCompletionResponse, Option)> { let url = if let Some(custom_url) = custom_url { @@ -2119,6 +2429,7 @@ impl EditPredictionStore { }, client, llm_token, + organization_id, app_version, true, ) @@ -2129,6 +2440,7 @@ impl EditPredictionStore { input: ZetaPromptInput, client: Arc, llm_token: LlmApiToken, + organization_id: Option, app_version: Version, trigger: PredictEditsRequestTrigger, ) -> Result<(PredictEditsV3Response, Option)> { @@ -2151,6 +2463,7 @@ impl EditPredictionStore { }, client, llm_token, + organization_id, app_version, true, ) @@ -2204,6 +2517,7 @@ impl EditPredictionStore { build: impl Fn(http_client::http::request::Builder) -> Result>, client: Arc, llm_token: LlmApiToken, + organization_id: Option, app_version: Version, require_auth: bool, ) -> Result<(Res, Option)> @@ -2213,9 +2527,12 @@ impl EditPredictionStore { let http_client = client.http_client(); let mut token = if require_auth { - Some(llm_token.acquire(&client).await?) + Some(llm_token.acquire(&client, organization_id.clone()).await?) 
} else { - llm_token.acquire(&client).await.ok() + llm_token + .acquire(&client, organization_id.clone()) + .await + .ok() }; let mut did_retry = false; @@ -2257,7 +2574,7 @@ impl EditPredictionStore { return Ok((serde_json::from_slice(&body)?, usage)); } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() { did_retry = true; - token = Some(llm_token.refresh(&client).await?); + token = Some(llm_token.refresh(&client, organization_id.clone()).await?); } else { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index eb76e0fd05182a1b9048bcf36f1bcebe8e808ef2..bbad3c104e6f84f30c7906ba310df132ee66191e 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -29,7 +29,10 @@ use util::path; use uuid::Uuid; use zeta_prompt::ZetaPromptInput; -use crate::{BufferEditPrediction, EditPredictionId, EditPredictionStore, REJECT_REQUEST_DEBOUNCE}; +use crate::{ + BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId, + EditPredictionStore, REJECT_REQUEST_DEBOUNCE, +}; #[gpui::test] async fn test_current_state(cx: &mut TestAppContext) { @@ -897,7 +900,8 @@ async fn test_empty_prediction(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: id, reason: EditPredictionRejectReason::Empty, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -957,7 +961,8 @@ async fn test_interpolated_empty(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: id, reason: EditPredictionRejectReason::InterpolatedEmpty, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1049,7 +1054,8 @@ async fn test_replace_current(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Replaced, - was_shown: false + was_shown: false, 
+ model_version: None, }] ); } @@ -1143,7 +1149,8 @@ async fn test_current_preferred(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: second_id, reason: EditPredictionRejectReason::CurrentPreferred, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1234,7 +1241,8 @@ async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Canceled, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1364,12 +1372,14 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) { EditPredictionRejection { request_id: cancelled_id, reason: EditPredictionRejectReason::Canceled, - was_shown: false + was_shown: false, + model_version: None, }, EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Replaced, - was_shown: false + was_shown: false, + model_version: None, } ] ); @@ -1476,6 +1486,52 @@ async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) { cx.run_until_parked(); } +#[gpui::test] +async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.md": "Hello!\nHow\nBye\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(1, 3)); + + // Enqueue two refresh calls in the same synchronous frame (no yielding). 
+ // Both `cx.spawn` tasks are created before either executes, so they both + // capture the same `proceed_count_at_enqueue`. Only the first task should + // pass the deduplication gate; the second should be skipped. + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + // Let both spawned tasks run to completion (including any throttle waits). + cx.run_until_parked(); + + // Exactly one prediction request should have been sent. + let (request, respond_tx) = requests.predict.next().await.unwrap(); + respond_tx + .send(model_response(&request, SIMPLE_DIFF)) + .unwrap(); + cx.run_until_parked(); + + // No second request should be pending. + assert_no_predict_request_ready(&mut requests.predict); +} + #[gpui::test] async fn test_rejections_flushing(cx: &mut TestAppContext) { let (ep_store, mut requests) = init_test_with_fake_client(cx); @@ -1485,12 +1541,14 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId("test-1".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); ep_store.reject_prediction( EditPredictionId("test-2".into()), EditPredictionRejectReason::Canceled, true, + None, cx, ); }); @@ -1508,7 +1566,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionRejection { request_id: "test-1".to_string(), reason: EditPredictionRejectReason::Discarded, - was_shown: false + was_shown: false, + model_version: None, } ); assert_eq!( @@ -1516,7 +1575,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionRejection { request_id: "test-2".to_string(), reason: EditPredictionRejectReason::Canceled, - was_shown: true + was_shown: true, + model_version: None, } ); @@ -1527,6 +1587,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId(format!("batch-{}", i).into()), 
EditPredictionRejectReason::Discarded, false, + None, cx, ); } @@ -1558,6 +1619,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId("retry-1".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); }); @@ -1577,6 +1639,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId("retry-2".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); }); @@ -1687,20 +1750,25 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { // Generate a model response that would apply the given diff to the active file. fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response { - let excerpt = - request.input.cursor_excerpt[request.input.editable_range_in_excerpt.clone()].to_string(); + let editable_range = + zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1; + let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string(); let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap(); PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), + editable_range, output: new_excerpt, + model_version: None, } } fn empty_response() -> PredictEditsV3Response { PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), + editable_range: 0..0, output: String::new(), + model_version: None, } } @@ -1820,16 +1888,17 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { related_files: Default::default(), cursor_path: Path::new("").into(), cursor_excerpt: "".into(), - editable_range_in_excerpt: 0..0, cursor_offset_in_excerpt: 0, excerpt_start_row: None, - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }, buffer_snapshotted_at: Instant::now(), response_received_at: Instant::now(), + model_version: None, }; cx.update(|cx| { @@ -2018,13 
+2087,16 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); }); - let (_request, respond_tx) = requests.predict.next().await.unwrap(); + let (request, respond_tx) = requests.predict.next().await.unwrap(); // Model returns output WITH a trailing newline, even though the buffer doesn't have one. // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted. + let excerpt_length = request.input.cursor_excerpt.len(); let response = PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), output: "hello world\n".to_string(), + editable_range: 0..excerpt_length, + model_version: None, }; respond_tx.send(response).unwrap(); @@ -2099,9 +2171,12 @@ async fn make_test_ep_store( let mut next_request_id = 0; move |req| { let completion_response = completion_response.clone(); + let method = req.method().clone(); + let uri = req.uri().path().to_string(); + let mut body = req.into_body(); async move { - match (req.method(), req.uri().path()) { - (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() + match (method, uri.as_str()) { + (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() .status(200) .body( serde_json::to_string(&CreateLlmTokenResponse { @@ -2111,14 +2186,22 @@ async fn make_test_ep_store( .into(), ) .unwrap()), - (&Method::POST, "/predict_edits/v3") => { + (Method::POST, "/predict_edits/v3") => { + let mut buf = Vec::new(); + body.read_to_end(&mut buf).await.ok(); + let decompressed = zstd::decode_all(&buf[..]).unwrap(); + let req: PredictEditsV3Request = + serde_json::from_slice(&decompressed).unwrap(); + next_request_id += 1; Ok(http_client::Response::builder() .status(200) .body( serde_json::to_string(&PredictEditsV3Response { request_id: format!("request-{next_request_id}"), + editable_range: 0..req.input.cursor_excerpt.len(), output: 
completion_response.lock().clone(), + model_version: None, }) .unwrap() .into(), @@ -2127,7 +2210,7 @@ async fn make_test_ep_store( } _ => Ok(http_client::Response::builder() .status(404) - .body("Not Found".into()) + .body("Not Found".to_string().into()) .unwrap()), } } @@ -2142,7 +2225,7 @@ async fn make_test_ep_store( let ep_store = cx.new(|cx| { let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx); - ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta); let worktrees = project.read(cx).worktrees(cx).collect::>(); for worktree in worktrees { @@ -2241,7 +2324,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut cx.background_executor.run_until_parked(); let completion_task = ep_store.update(cx, |ep_store, cx| { - ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta); ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx) }); @@ -2536,6 +2619,181 @@ async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppCont ); } +#[gpui::test] +async fn test_edit_prediction_settled(cx: &mut TestAppContext) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + + // Buffer with two clearly separated regions: + // Region A = lines 0-9 (offsets 0..50) + // Region B = lines 20-29 (offsets 105..155) + // A big gap in between so edits in one region never overlap the other. 
+ let mut content = String::new(); + for i in 0..30 { + content.push_str(&format!("line {i:02}\n")); + } + + fs.insert_tree( + "/root", + json!({ + "foo.md": content.clone() + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + type SettledEventRecord = (EditPredictionId, String); + let settled_events: Arc>> = Arc::new(Mutex::new(Vec::new())); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + + let settled_events = settled_events.clone(); + ep_store.settled_event_callback = Some(Box::new(move |id, text| { + settled_events.lock().push((id, text)); + })); + }); + + // --- Phase 1: edit in region A and enqueue prediction A --- + + buffer.update(cx, |buffer, cx| { + // Edit at the start of line 0. + buffer.edit(vec![(0..0, "ADDED ")], None, cx); + }); + cx.run_until_parked(); + + let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + // Region A: first 10 lines of the buffer. + let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.enqueue_settled_prediction( + EditPredictionId("prediction-a".into()), + &project, + &buffer, + &snapshot_a, + editable_region_a.clone(), + None, + cx, + ); + }); + + // --- Phase 2: repeatedly edit in region A to keep it unsettled --- + + // Let the worker process the channel message before we start advancing. + cx.run_until_parked(); + + let mut region_a_edit_offset = 5; + for _ in 0..3 { + // Edit inside region A (not at the boundary) so `last_edit_at` is + // updated before the worker's next wake. 
+ buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![(region_a_edit_offset..region_a_edit_offset, "x")], + None, + cx, + ); + }); + region_a_edit_offset += 1; + cx.run_until_parked(); + + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2); + cx.run_until_parked(); + assert!( + settled_events.lock().is_empty(), + "no settled events should fire while region A is still being edited" + ); + } + + // Still nothing settled. + assert!(settled_events.lock().is_empty()); + + // --- Phase 3: edit in distinct region B, enqueue prediction B --- + // Advance a small amount so B's quiescence window starts later than A's, + // but not so much that A settles (A's last edit was at the start of + // iteration 3, and it needs a full Q to settle). + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4); + cx.run_until_parked(); + assert!(settled_events.lock().is_empty()); + + let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0)); + + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx); + }); + cx.run_until_parked(); + + let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.enqueue_settled_prediction( + EditPredictionId("prediction-b".into()), + &project, + &buffer, + &snapshot_b2, + editable_region_b.clone(), + None, + cx, + ); + }); + + cx.run_until_parked(); + assert!( + settled_events.lock().is_empty(), + "neither prediction should have settled yet" + ); + + // --- Phase 4: let enough time pass for region A to settle --- + // A's last edit was at T_a (during the last loop iteration). The worker is + // sleeping until T_a + Q. 
We advance just enough to reach that wake time + // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's + // 3*Q/2). At that point A has been quiet for Q and settles, but B was + // enqueued only Q/4 ago and stays pending. + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4); + cx.run_until_parked(); + + { + let events = settled_events.lock().clone(); + assert_eq!( + events.len(), + 1, + "prediction and capture_sample for A should have settled, got: {events:?}" + ); + assert_eq!(events[0].0, EditPredictionId("prediction-a".into())); + } + + // --- Phase 5: let more time pass for region B to settle --- + // B's last edit was Q/4 before A settled. The worker rescheduled to + // B's last_edit_at + Q, which is 3Q/4 from now. + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4); + cx.run_until_parked(); + + { + let events = settled_events.lock().clone(); + assert_eq!( + events.len(), + 2, + "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}" + ); + assert_eq!(events[1].0, EditPredictionId("prediction-b".into())); + } +} + #[ctor::ctor] fn init_logger() { zlog::init_test(); diff --git a/crates/edit_prediction/src/fim.rs b/crates/edit_prediction/src/fim.rs index 7ba6c6bef77c5b2229d1b3a4072e8070e5c4a6f1..02053aae7154acdfa22a01a4f84d6b732a9ca696 100644 --- a/crates/edit_prediction/src/fim.rs +++ b/crates/edit_prediction/src/fim.rs @@ -1,6 +1,7 @@ use crate::{ - EditPredictionId, EditPredictionModelInput, cursor_excerpt, prediction::EditPredictionResult, - zeta, + EditPredictionId, EditPredictionModelInput, cursor_excerpt, + open_ai_compatible::{self, load_open_ai_compatible_api_key_if_needed}, + prediction::EditPredictionResult, }; use anyhow::{Context as _, Result, anyhow}; use gpui::{App, AppContext as _, Entity, Task}; @@ -58,6 +59,8 @@ pub fn request_prediction( return Task::ready(Err(anyhow!("Unsupported edit prediction provider for FIM"))); }; + let api_key = 
load_open_ai_compatible_api_key_if_needed(provider, cx); + let result = cx.background_spawn(async move { let (excerpt_range, _) = cursor_excerpt::editable_and_context_ranges_for_cursor_position( cursor_point, @@ -72,18 +75,17 @@ pub fn request_prediction( events, related_files: Vec::new(), cursor_offset_in_excerpt: cursor_offset - excerpt_offset_range.start, - editable_range_in_excerpt: cursor_offset - excerpt_offset_range.start - ..cursor_offset - excerpt_offset_range.start, cursor_path: full_path.clone(), excerpt_start_row: Some(excerpt_range.start.row), cursor_excerpt: snapshot .text_for_range(excerpt_range) .collect::() .into(), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prefix = inputs.cursor_excerpt[..inputs.cursor_offset_in_excerpt].to_string(); @@ -92,12 +94,14 @@ pub fn request_prediction( let stop_tokens = get_fim_stop_tokens(); let max_tokens = settings.max_output_tokens; - let (response_text, request_id) = zeta::send_custom_server_request( + + let (response_text, request_id) = open_ai_compatible::send_custom_server_request( provider, &settings, prompt, max_tokens, stop_tokens, + api_key, &http_client, ) .await?; @@ -141,6 +145,7 @@ pub fn request_prediction( output.buffer_snapshotted_at, output.response_received_at, output.inputs, + None, cx, ) .await, diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index 4187881639d8c363582f7a2c7603f2bb51e09fa7..f61219e2f71d5efbb2fb67250b58b0a5a090e9a8 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -16,7 +16,7 @@ use release_channel::AppVersion; use serde::Serialize; use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant}; -use zeta_prompt::ZetaPromptInput; +use zeta_prompt::{ExcerptRanges, ZetaPromptInput}; const MERCURY_API_URL: &str = 
"https://api.inceptionlabs.ai/v1/edit/completions"; const MAX_REWRITE_TOKENS: usize = 150; @@ -83,6 +83,12 @@ impl Mercury { let editable_offset_range = editable_range.to_offset(&snapshot); + let editable_range_in_excerpt = (editable_offset_range.start + - context_offset_range.start) + ..(editable_offset_range.end - context_offset_range.start); + let context_range_in_excerpt = + 0..(context_offset_range.end - context_offset_range.start); + let inputs = zeta_prompt::ZetaPromptInput { events, related_files, @@ -93,14 +99,20 @@ impl Mercury { .text_for_range(context_range) .collect::() .into(), - editable_range_in_excerpt: (editable_offset_range.start - - context_offset_range.start) - ..(editable_offset_range.end - context_offset_range.start), + experiment: None, excerpt_start_row: Some(context_start_row), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range_in_excerpt.clone(), + editable_180: editable_range_in_excerpt.clone(), + editable_350: editable_range_in_excerpt.clone(), + editable_150_context_350: context_range_in_excerpt.clone(), + editable_180_context_350: context_range_in_excerpt.clone(), + editable_350_context_150: context_range_in_excerpt.clone(), + ..Default::default() + }, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = build_prompt(&inputs); @@ -218,6 +230,7 @@ impl Mercury { buffer_snapshotted_at, response_received_at, inputs, + None, cx, ) .await, @@ -272,19 +285,18 @@ fn build_prompt(inputs: &ZetaPromptInput) -> String { prompt.push_str(inputs.cursor_path.as_os_str().to_string_lossy().as_ref()); prompt.push('\n'); - prompt.push_str(&inputs.cursor_excerpt[0..inputs.editable_range_in_excerpt.start]); + let editable_range = &inputs.excerpt_ranges.editable_350; + prompt.push_str(&inputs.cursor_excerpt[0..editable_range.start]); push_delimited(prompt, CODE_TO_EDIT_START..CODE_TO_EDIT_END, |prompt| { prompt.push_str( - &inputs.cursor_excerpt - 
[inputs.editable_range_in_excerpt.start..inputs.cursor_offset_in_excerpt],
+            &inputs.cursor_excerpt[editable_range.start..inputs.cursor_offset_in_excerpt],
         );
         prompt.push_str(CURSOR_TAG);
         prompt.push_str(
-            &inputs.cursor_excerpt
-                [inputs.cursor_offset_in_excerpt..inputs.editable_range_in_excerpt.end],
+            &inputs.cursor_excerpt[inputs.cursor_offset_in_excerpt..editable_range.end],
         );
     });
-    prompt.push_str(&inputs.cursor_excerpt[inputs.editable_range_in_excerpt.end..]);
+    prompt.push_str(&inputs.cursor_excerpt[editable_range.end..]);
         },
     );
diff --git a/crates/edit_prediction/src/open_ai_compatible.rs b/crates/edit_prediction/src/open_ai_compatible.rs
new file mode 100644
index 0000000000000000000000000000000000000000..ca378ba1fd0bc9bdbb3e85c7610e1b94c1be388f
--- /dev/null
+++ b/crates/edit_prediction/src/open_ai_compatible.rs
@@ -0,0 +1,133 @@
+use anyhow::{Context as _, Result};
+use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse};
+use futures::AsyncReadExt as _;
+use gpui::{App, AppContext as _, Entity, Global, SharedString, Task, http_client};
+use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings};
+use language_model::{ApiKeyState, EnvVar, env_var};
+use std::sync::Arc;
+
+pub fn open_ai_compatible_api_url(cx: &App) -> SharedString {
+    all_language_settings(None, cx)
+        .edit_predictions
+        .open_ai_compatible_api
+        .as_ref()
+        .map(|settings| settings.api_url.clone())
+        .unwrap_or_default()
+        .into()
+}
+
+pub const OPEN_AI_COMPATIBLE_CREDENTIALS_USERNAME: &str = "openai-compatible-api-token";
+pub static OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR: std::sync::LazyLock<EnvVar> =
+    env_var!("ZED_OPEN_AI_COMPATIBLE_EDIT_PREDICTION_API_KEY");
+
+struct GlobalOpenAiCompatibleApiKey(Entity<ApiKeyState>);
+
+impl Global for GlobalOpenAiCompatibleApiKey {}
+
+pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity<ApiKeyState> {
+    if let Some(global) = cx.try_global::<GlobalOpenAiCompatibleApiKey>() {
+        return global.0.clone();
+    }
+
+    let entity = cx.new(|cx| {
+ ApiKeyState::new( + open_ai_compatible_api_url(cx), + OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR.clone(), + ) + }); + cx.set_global(GlobalOpenAiCompatibleApiKey(entity.clone())); + entity +} + +pub fn load_open_ai_compatible_api_token( + cx: &mut App, +) -> Task> { + let api_url = open_ai_compatible_api_url(cx); + open_ai_compatible_api_token(cx).update(cx, |key_state, cx| { + key_state.load_if_needed(api_url, |s| s, cx) + }) +} + +pub fn load_open_ai_compatible_api_key_if_needed( + provider: settings::EditPredictionProvider, + cx: &mut App, +) -> Option> { + if provider != settings::EditPredictionProvider::OpenAiCompatibleApi { + return None; + } + _ = load_open_ai_compatible_api_token(cx); + let url = open_ai_compatible_api_url(cx); + return open_ai_compatible_api_token(cx).read(cx).key(&url); +} + +pub(crate) async fn send_custom_server_request( + provider: settings::EditPredictionProvider, + settings: &OpenAiCompatibleEditPredictionSettings, + prompt: String, + max_tokens: u32, + stop_tokens: Vec, + api_key: Option>, + http_client: &Arc, +) -> Result<(String, String)> { + match provider { + settings::EditPredictionProvider::Ollama => { + let response = crate::ollama::make_request( + settings.clone(), + prompt, + stop_tokens, + http_client.clone(), + ) + .await?; + Ok((response.response, response.created_at)) + } + _ => { + let request = RawCompletionRequest { + model: settings.model.clone(), + prompt, + max_tokens: Some(max_tokens), + temperature: None, + stop: stop_tokens + .into_iter() + .map(std::borrow::Cow::Owned) + .collect(), + environment: None, + }; + + let request_body = serde_json::to_string(&request)?; + let mut http_request_builder = http_client::Request::builder() + .method(http_client::Method::POST) + .uri(settings.api_url.as_ref()) + .header("Content-Type", "application/json"); + + if let Some(api_key) = api_key { + http_request_builder = + http_request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + + let http_request = + 
http_request_builder.body(http_client::AsyncBody::from(request_body))?; + + let mut response = http_client.send(http_request).await?; + let status = response.status(); + + if !status.is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!("custom server error: {} - {}", status, body); + } + + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + let parsed: RawCompletionResponse = + serde_json::from_str(&body).context("Failed to parse completion response")?; + let text = parsed + .choices + .into_iter() + .next() + .map(|choice| choice.text) + .unwrap_or_default(); + Ok((text, parsed.id)) + } + } +} diff --git a/crates/edit_prediction/src/prediction.rs b/crates/edit_prediction/src/prediction.rs index 750b1a435ae4a7a281ef41973e1f6d0d2158445e..263409043b397e2df1ac32514a0ce76656fbefe1 100644 --- a/crates/edit_prediction/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -41,6 +41,7 @@ impl EditPredictionResult { buffer_snapshotted_at: Instant, response_received_at: Instant, inputs: ZetaPromptInput, + model_version: Option, cx: &mut AsyncApp, ) -> Self { if edits.is_empty() { @@ -79,6 +80,7 @@ impl EditPredictionResult { buffer: edited_buffer.clone(), buffer_snapshotted_at, response_received_at, + model_version, }), } } @@ -95,6 +97,7 @@ pub struct EditPrediction { pub buffer_snapshotted_at: Instant, pub response_received_at: Instant, pub inputs: zeta_prompt::ZetaPromptInput, + pub model_version: Option, } impl EditPrediction { @@ -150,18 +153,19 @@ mod tests { snapshot: cx.read(|cx| buffer.read(cx).snapshot()), buffer: buffer.clone(), edit_preview, + model_version: None, inputs: ZetaPromptInput { events: vec![], related_files: vec![], cursor_path: Path::new("path.txt").into(), cursor_offset_in_excerpt: 0, cursor_excerpt: "".into(), - editable_range_in_excerpt: 0..0, excerpt_start_row: None, - excerpt_ranges: None, - preferred_model: None, + 
excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }, buffer_snapshotted_at: Instant::now(), response_received_at: Instant::now(), diff --git a/crates/edit_prediction/src/sweep_ai.rs b/crates/edit_prediction/src/sweep_ai.rs index 1253916487894d757c74293c21f4ace1c681cd11..d8ce180801aa8902bfff79044cabaae7570ed05f 100644 --- a/crates/edit_prediction/src/sweep_ai.rs +++ b/crates/edit_prediction/src/sweep_ai.rs @@ -215,14 +215,21 @@ impl SweepAi { related_files: inputs.related_files.clone(), cursor_path: full_path.clone(), cursor_excerpt: request_body.file_contents.clone().into(), - // we actually don't know - editable_range_in_excerpt: 0..inputs.snapshot.len(), cursor_offset_in_excerpt: request_body.cursor_position, excerpt_start_row: Some(0), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: zeta_prompt::ExcerptRanges { + editable_150: 0..inputs.snapshot.len(), + editable_180: 0..inputs.snapshot.len(), + editable_350: 0..inputs.snapshot.len(), + editable_150_context_350: 0..inputs.snapshot.len(), + editable_180_context_350: 0..inputs.snapshot.len(), + editable_350_context_150: 0..inputs.snapshot.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; send_started_event( @@ -303,6 +310,7 @@ impl SweepAi { buffer_snapshotted_at, response_received_at, inputs, + None, cx, ) .await, diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index e1e475c85526befb5549571cf7b7a2e1ef10c3d8..14be1991d34e985067f5ad8729fd7ac8485211db 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -266,6 +266,66 @@ pub fn strip_diff_metadata(diff: &str) -> String { result } +/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. 
+///
+/// If no exact matches are found and the context ends with `'\n'` but `text`
+/// does not, retries without the trailing newline, accepting only a match at
+/// the very end of `text`. When this fallback fires, the hunk's context is
+/// trimmed and its edit ranges are clamped so that downstream code doesn't
+/// index past the end of the matched region. This handles diffs that are
+/// missing a `\ No newline at end of file` marker: the parser always appends
+/// `'\n'` via `writeln!`, so the context can have a trailing newline that
+/// doesn't exist in the source text.
+fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec<usize> {
+    let candidates: Vec<usize> = text
+        .match_indices(&hunk.context)
+        .map(|(offset, _)| offset)
+        .collect();
+
+    if !candidates.is_empty() {
+        return candidates;
+    }
+
+    if hunk.context.ends_with('\n') && !hunk.context.is_empty() {
+        let old_len = hunk.context.len();
+        hunk.context.pop();
+        let new_len = hunk.context.len();
+
+        if !hunk.context.is_empty() {
+            let candidates: Vec<usize> = text
+                .match_indices(&hunk.context)
+                .filter(|(offset, _)| offset + new_len == text.len())
+                .map(|(offset, _)| offset)
+                .collect();
+
+            if !candidates.is_empty() {
+                for edit in &mut hunk.edits {
+                    let touched_phantom = edit.range.end > new_len;
+                    edit.range.start = edit.range.start.min(new_len);
+                    edit.range.end = edit.range.end.min(new_len);
+                    if touched_phantom {
+                        // The replacement text was also written with a
+                        // trailing '\n' that corresponds to the phantom
+                        // newline we just removed from the context.
+                        if edit.text.ends_with('\n') {
+                            edit.text.pop();
+                        }
+                    }
+                }
+                return candidates;
+            }
+
+            // Restore if fallback didn't help either.
+            hunk.context.push('\n');
+            debug_assert_eq!(hunk.context.len(), old_len);
+        } else {
+            hunk.context.push('\n');
+        }
+    }
+
+    Vec::new()
+}
+
 /// Given multiple candidate offsets where context matches, use line numbers to disambiguate.
/// Returns the offset that matches the expected line, or None if no match or no line number available. fn disambiguate_by_line_number( @@ -305,15 +365,11 @@ pub fn apply_diff_to_string_with_hunk_offset( while let Some(event) = diff.next().context("Failed to parse diff")? { match event { DiffEvent::Hunk { - hunk, + mut hunk, path: _, status: _, } => { - // Find all matches of the context in the text - let candidates: Vec = text - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); + let candidates = find_context_candidates(&text, &mut hunk); let hunk_offset = disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { @@ -348,7 +404,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result while let Some(event) = diff.next()? { match event { DiffEvent::Hunk { - hunk, + mut hunk, path: _, status: _, } => { @@ -356,11 +412,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result return Ok(Vec::new()); } - // Find all matches of the context in the content - let candidates: Vec = content - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); + let candidates = find_context_candidates(content, &mut hunk); let Some(context_offset) = disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { @@ -611,7 +663,7 @@ impl<'a> DiffParser<'a> { } fn resolve_hunk_edits_in_buffer( - hunk: Hunk, + mut hunk: Hunk, buffer: &TextBufferSnapshot, ranges: &[Range], status: FileStatus, @@ -623,7 +675,7 @@ fn resolve_hunk_edits_in_buffer( for range in ranges { let range = range.to_offset(buffer); let text = buffer.text_for_range(range.clone()).collect::(); - for (ix, _) in text.match_indices(&hunk.context) { + for ix in find_context_candidates(&text, &mut hunk) { candidates.push(range.start + ix); } } @@ -1513,4 +1565,185 @@ mod tests { "#} ); } + + #[test] + fn test_apply_diff_to_string_no_trailing_newline() { + // Text without trailing newline; diff generated without + // `\ No newline at end of file` 
marker. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3"); + } + + #[test] + fn test_apply_diff_to_string_trailing_newline_present() { + // When text has a trailing newline, exact matching still works and + // the fallback is never needed. + let text = "line1\nline2\nline3\n"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3\n"); + } + + #[test] + fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { + // Deletion of the last line when text has no trailing newline. + // The edit range must be clamped so it doesn't index past the + // end of the text. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + line1 + line2 + -line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nline2\n"); + } + + #[test] + fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { + // Replace the last line when text has no trailing newline. + let text = "aaa\nbbb\nccc"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + aaa + bbb + -ccc + +ddd + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "aaa\nbbb\nddd"); + } + + #[test] + fn test_apply_diff_to_string_multibyte_no_trailing_newline() { + // Multi-byte UTF-8 characters near the end; ensures char boundary + // safety when the fallback clamps edit ranges. + let text = "hello\n세계"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + hello + -세계 + +world + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "hello\nworld"); + } + + #[test] + fn test_find_context_candidates_no_false_positive_mid_text() { + // The stripped fallback must only match at the end of text, not in + // the middle where a real newline exists. + let text = "aaa\nbbb\nccc\n"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // Exact match at offset 4 — the fallback is not used. + assert_eq!(candidates, vec![4]); + } + + #[test] + fn test_find_context_candidates_fallback_at_end() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![4]); + // Context should be stripped. + assert_eq!(hunk.context, "bbb"); + } + + #[test] + fn test_find_context_candidates_no_fallback_mid_text() { + // "bbb" appears mid-text followed by a newline, so the exact + // match succeeds. Verify the stripped fallback doesn't produce a + // second, spurious candidate. + let text = "aaa\nbbb\nccc"; + let mut hunk = Hunk { + context: "bbb\nccc\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // No exact match (text ends without newline after "ccc"), but the + // stripped context "bbb\nccc" matches at offset 4, which is the end. 
+ assert_eq!(candidates, vec![4]); + assert_eq!(hunk.context, "bbb\nccc"); + } + + #[test] + fn test_find_context_candidates_clamps_edit_ranges() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "aaa\nbbb\n".into(), + edits: vec![Edit { + range: 4..8, // "bbb\n" — end points at the trailing \n + text: "ccc\n".into(), + }], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![0]); + // Edit range end should be clamped to 7 (new context length). + assert_eq!(hunk.edits[0].range, 4..7); + } + + #[test] + fn test_edits_for_diff_no_trailing_newline() { + let content = "foo\nbar\nbaz"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + foo + -bar + +qux + baz + "}; + + let result = edits_for_diff(content, diff).unwrap(); + assert_eq!(result.len(), 1); + let (range, text) = &result[0]; + assert_eq!(&content[range.clone()], "bar"); + assert_eq!(text, "qux"); + } } diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 658071c9ccfbdf64a9a1ebead7724774cd5cc40e..3d111bfd9394a90e87a70e24ae96eb69a58afe91 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -2,28 +2,30 @@ use crate::cursor_excerpt::compute_excerpt_ranges; use crate::prediction::EditPredictionResult; use crate::{ CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, - EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, ollama, + EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent, }; -use anyhow::{Context as _, Result}; -use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse}; +use anyhow::Result; +use cloud_llm_client::predict_edits_v3::RawCompletionRequest; use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason}; use edit_prediction_types::PredictedCursorPosition; -use 
futures::AsyncReadExt as _; -use gpui::{App, AppContext as _, Task, http_client, prelude::*}; -use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings}; +use gpui::{App, AppContext as _, Task, prelude::*}; +use language::language_settings::all_language_settings; use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff}; use release_channel::AppVersion; +use settings::EditPredictionPromptFormat; use text::{Anchor, Bias}; -use std::env; -use std::ops::Range; -use std::{path::Path, sync::Arc, time::Instant}; +use std::{env, ops::Range, path::Path, sync::Arc, time::Instant}; use zeta_prompt::{ - CURSOR_MARKER, EditPredictionModelKind, ZetaFormat, clean_zeta2_model_output, - format_zeta_prompt, get_prefill, prompt_input_contains_special_tokens, + CURSOR_MARKER, ZetaFormat, clean_zeta2_model_output, format_zeta_prompt, get_prefill, + output_with_context_for_format, prompt_input_contains_special_tokens, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; +use crate::open_ai_compatible::{ + load_open_ai_compatible_api_key_if_needed, send_custom_server_request, +}; + pub fn request_prediction_with_zeta( store: &mut EditPredictionStore, EditPredictionModelInput { @@ -35,9 +37,11 @@ pub fn request_prediction_with_zeta( debug_tx, trigger, project, + can_collect_data, + is_open_source, .. 
}: EditPredictionModelInput, - preferred_model: Option, + capture_data: Option>, cx: &mut Context, ) -> Task>> { let settings = &all_language_settings(None, cx).edit_predictions; @@ -53,24 +57,35 @@ pub fn request_prediction_with_zeta( let http_client = cx.http_client(); let buffer_snapshotted_at = Instant::now(); let raw_config = store.zeta2_raw_config().cloned(); + let preferred_experiment = store.preferred_experiment().map(|s| s.to_owned()); + let open_ai_compatible_api_key = load_open_ai_compatible_api_key_if_needed(provider, cx); let excerpt_path: Arc = snapshot .file() .map(|file| -> Arc { file.full_path(cx).into() }) .unwrap_or_else(|| Arc::from(Path::new("untitled"))); + let repo_url = if can_collect_data { + let buffer_id = buffer.read(cx).remote_id(); + project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_id, cx) + .and_then(|(repo, _)| repo.read(cx).default_remote_url()) + } else { + None + }; + let client = store.client.clone(); let llm_token = store.llm_token.clone(); + let organization_id = store + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); let app_version = AppVersion::global(cx); - let is_open_source = snapshot - .file() - .map_or(false, |file| store.is_file_open_source(&project, file, cx)) - && events.iter().all(|event| event.in_open_source_repo()) - && related_files.iter().all(|file| file.in_open_source_repo); - - let can_collect_data = is_open_source && store.is_data_collection_enabled(cx); - let request_task = cx.background_spawn({ async move { let zeta_version = raw_config @@ -79,38 +94,25 @@ pub fn request_prediction_with_zeta( .unwrap_or(ZetaFormat::default()); let cursor_offset = position.to_offset(&snapshot); - let (editable_offset_range, prompt_input) = zeta2_prompt_input( + let editable_range_in_excerpt: Range; + let (full_context_offset_range, prompt_input) = zeta2_prompt_input( &snapshot, related_files, events, excerpt_path, cursor_offset, - 
zeta_version, - preferred_model, + preferred_experiment, is_open_source, can_collect_data, + repo_url, ); if prompt_input_contains_special_tokens(&prompt_input, zeta_version) { return Ok((None, None)); } - let is_zeta1 = preferred_model == Some(EditPredictionModelKind::Zeta1); - let excerpt_ranges = prompt_input - .excerpt_ranges - .as_ref() - .ok_or_else(|| anyhow::anyhow!("excerpt_ranges missing from prompt input"))?; - if let Some(debug_tx) = &debug_tx { - let prompt = if is_zeta1 { - zeta1::format_zeta1_from_input( - &prompt_input, - excerpt_ranges.editable_350.clone(), - excerpt_ranges.editable_350_context_150.clone(), - ) - } else { - format_zeta_prompt(&prompt_input, zeta_version) - }; + let prompt = format_zeta_prompt(&prompt_input, zeta_version); debug_tx .unbounded_send(DebugEvent::EditPredictionStarted( EditPredictionStartedDebugEvent { @@ -124,81 +126,105 @@ pub fn request_prediction_with_zeta( log::trace!("Sending edit prediction request"); - let (request_id, output_text, usage) = + let (request_id, output_text, model_version, usage) = if let Some(custom_settings) = &custom_server_settings { let max_tokens = custom_settings.max_output_tokens * 4; - if is_zeta1 { - let ranges = excerpt_ranges; - let prompt = zeta1::format_zeta1_from_input( - &prompt_input, - ranges.editable_350.clone(), - ranges.editable_350_context_150.clone(), - ); - let stop_tokens = vec![ - EDITABLE_REGION_END_MARKER.to_string(), - format!("{EDITABLE_REGION_END_MARKER}\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), - ]; - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, - prompt, - max_tokens, - stop_tokens, - &http_client, - ) - .await?; - - let request_id = EditPredictionId(request_id.into()); - let output_text = zeta1::clean_zeta1_model_output(&response_text); - - (request_id, output_text, None) - } else { - let prompt = format_zeta_prompt(&prompt_input, zeta_version); - let 
prefill = get_prefill(&prompt_input, zeta_version); - let prompt = format!("{prompt}{prefill}"); - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, - prompt, - max_tokens, - vec![], - &http_client, - ) - .await?; - - let request_id = EditPredictionId(request_id.into()); - let output_text = if response_text.is_empty() { - None - } else { - let output = format!("{prefill}{response_text}"); - Some(clean_zeta2_model_output(&output, zeta_version).to_string()) - }; - - (request_id, output_text, None) + match custom_settings.prompt_format { + EditPredictionPromptFormat::Zeta => { + let ranges = &prompt_input.excerpt_ranges; + let prompt = zeta1::format_zeta1_from_input( + &prompt_input, + ranges.editable_350.clone(), + ranges.editable_350_context_150.clone(), + ); + editable_range_in_excerpt = ranges.editable_350.clone(); + let stop_tokens = vec![ + EDITABLE_REGION_END_MARKER.to_string(), + format!("{EDITABLE_REGION_END_MARKER}\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), + ]; + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + stop_tokens, + open_ai_compatible_api_key.clone(), + &http_client, + ) + .await?; + + let request_id = EditPredictionId(request_id.into()); + let output_text = zeta1::clean_zeta1_model_output(&response_text); + + (request_id, output_text, None, None) + } + EditPredictionPromptFormat::Zeta2 => { + let prompt = format_zeta_prompt(&prompt_input, zeta_version); + let prefill = get_prefill(&prompt_input, zeta_version); + let prompt = format!("{prompt}{prefill}"); + + editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( + zeta_version, + &prompt_input.excerpt_ranges, + ) + .0; + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + vec![], + open_ai_compatible_api_key.clone(), + &http_client, + ) + 
.await?; + + let request_id = EditPredictionId(request_id.into()); + let output_text = if response_text.is_empty() { + None + } else { + let output = format!("{prefill}{response_text}"); + Some(clean_zeta2_model_output(&output, zeta_version).to_string()) + }; + + (request_id, output_text, None, None) + } + _ => anyhow::bail!("unsupported prompt format"), } } else if let Some(config) = &raw_config { let prompt = format_zeta_prompt(&prompt_input, config.format); let prefill = get_prefill(&prompt_input, config.format); let prompt = format!("{prompt}{prefill}"); + let environment = config + .environment + .clone() + .or_else(|| Some(config.format.to_string().to_lowercase())); let request = RawCompletionRequest { model: config.model_id.clone().unwrap_or_default(), prompt, temperature: None, stop: vec![], max_tokens: Some(2048), - environment: Some(config.format.to_string().to_lowercase()), + environment, }; + editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( + config.format, + &prompt_input.excerpt_ranges, + ) + .1; + let (mut response, usage) = EditPredictionStore::send_raw_llm_request( request, client, None, llm_token, + organization_id, app_version, ) .await?; @@ -210,13 +236,14 @@ pub fn request_prediction_with_zeta( clean_zeta2_model_output(&output, config.format).to_string() }); - (request_id, output_text, usage) + (request_id, output_text, None, usage) } else { // Use V3 endpoint - server handles model/version selection and suffix stripping let (response, usage) = EditPredictionStore::send_v3_request( prompt_input.clone(), client, llm_token, + organization_id, app_version, trigger, ) @@ -228,7 +255,10 @@ pub fn request_prediction_with_zeta( } else { Some(response.output) }; - (request_id, output_text, usage) + editable_range_in_excerpt = response.editable_range; + let model_version = response.model_version; + + (request_id, output_text, model_version, usage) }; let received_response_at = Instant::now(); @@ -236,9 +266,28 @@ pub fn 
request_prediction_with_zeta( log::trace!("Got edit prediction response"); let Some(mut output_text) = output_text else { - return Ok((Some((request_id, None)), usage)); + return Ok((Some((request_id, None, model_version)), usage)); }; + let editable_range_in_buffer = editable_range_in_excerpt.start + + full_context_offset_range.start + ..editable_range_in_excerpt.end + full_context_offset_range.start; + + let mut old_text = snapshot + .text_for_range(editable_range_in_buffer.clone()) + .collect::(); + + // For the hashline format, the model may return <|set|>/<|insert|> + // edit commands instead of a full replacement. Apply them against + // the original editable region to produce the full replacement text. + // This must happen before cursor marker stripping because the cursor + // marker is embedded inside edit command content. + if let Some(rewritten_output) = + output_with_context_for_format(zeta_version, &old_text, &output_text)? + { + output_text = rewritten_output; + } + // Client-side cursor marker processing (applies to both raw and v3 responses) let cursor_offset_in_output = output_text.find(CURSOR_MARKER); if let Some(offset) = cursor_offset_in_output { @@ -258,10 +307,6 @@ pub fn request_prediction_with_zeta( .ok(); } - let mut old_text = snapshot - .text_for_range(editable_offset_range.clone()) - .collect::(); - if !output_text.is_empty() && !output_text.ends_with('\n') { output_text.push('\n'); } @@ -272,7 +317,7 @@ pub fn request_prediction_with_zeta( let (edits, cursor_position) = compute_edits_and_cursor_position( old_text, &output_text, - editable_offset_range.start, + editable_range_in_buffer.start, cursor_offset_in_output, &snapshot, ); @@ -287,7 +332,9 @@ pub fn request_prediction_with_zeta( edits, cursor_position, received_response_at, + editable_range_in_buffer, )), + model_version, )), usage, )) @@ -295,7 +342,7 @@ pub fn request_prediction_with_zeta( }); cx.spawn(async move |this, cx| { - let Some((id, prediction)) = + let Some((id, 
prediction, model_version)) = EditPredictionStore::handle_api_response(&this, request_task.await, cx)? else { return Ok(None); @@ -308,6 +355,7 @@ pub fn request_prediction_with_zeta( edits, cursor_position, received_response_at, + editable_range_in_buffer, )) = prediction else { return Ok(Some(EditPredictionResult { @@ -316,6 +364,47 @@ pub fn request_prediction_with_zeta( })); }; + if can_collect_data { + let weak_this = this.clone(); + let id = id.clone(); + let edited_buffer = edited_buffer.clone(); + let edited_buffer_snapshot = edited_buffer_snapshot.clone(); + let example_task = capture_data.and_then(|stored_events| { + cx.update(|cx| { + crate::capture_example( + project.clone(), + edited_buffer.clone(), + position, + stored_events, + false, + cx, + ) + }) + }); + cx.spawn(async move |cx| { + let example_spec = if let Some(task) = example_task { + task.await.ok() + } else { + None + }; + + weak_this + .update(cx, |this, cx| { + this.enqueue_settled_prediction( + id.clone(), + &project, + &edited_buffer, + &edited_buffer_snapshot, + editable_range_in_buffer, + example_spec, + cx, + ); + }) + .ok(); + }) + .detach(); + } + Ok(Some( EditPredictionResult::new( id, @@ -326,6 +415,7 @@ pub fn request_prediction_with_zeta( buffer_snapshotted_at, received_response_at, inputs, + model_version, cx, ) .await, @@ -339,11 +429,11 @@ pub fn zeta2_prompt_input( events: Vec>, excerpt_path: Arc, cursor_offset: usize, - zeta_format: ZetaFormat, - preferred_model: Option, + preferred_experiment: Option, is_open_source: bool, can_collect_data: bool, -) -> (std::ops::Range, zeta_prompt::ZetaPromptInput) { + repo_url: Option, +) -> (Range, zeta_prompt::ZetaPromptInput) { let cursor_point = cursor_offset.to_point(snapshot); let (full_context, full_context_offset_range, excerpt_ranges) = @@ -358,13 +448,6 @@ pub fn zeta2_prompt_input( let full_context_start_offset = full_context_offset_range.start; let full_context_start_row = full_context.start.row; - let editable_offset_range = 
match preferred_model { - Some(EditPredictionModelKind::Zeta1) => excerpt_ranges.editable_350.clone(), - _ => zeta_prompt::excerpt_range_for_format(zeta_format, &excerpt_ranges).0, - }; - let absolute_editable_range = full_context_start_offset + editable_offset_range.start - ..full_context_start_offset + editable_offset_range.end; - let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset; let prompt_input = zeta_prompt::ZetaPromptInput { @@ -373,77 +456,17 @@ pub fn zeta2_prompt_input( .text_for_range(full_context) .collect::() .into(), - editable_range_in_excerpt: editable_offset_range, cursor_offset_in_excerpt, excerpt_start_row: Some(full_context_start_row), events, related_files, - excerpt_ranges: Some(excerpt_ranges), - preferred_model, + excerpt_ranges, + experiment: preferred_experiment, in_open_source_repo: is_open_source, can_collect_data, + repo_url, }; - (absolute_editable_range, prompt_input) -} - -pub(crate) async fn send_custom_server_request( - provider: settings::EditPredictionProvider, - settings: &OpenAiCompatibleEditPredictionSettings, - prompt: String, - max_tokens: u32, - stop_tokens: Vec, - http_client: &Arc, -) -> Result<(String, String)> { - match provider { - settings::EditPredictionProvider::Ollama => { - let response = - ollama::make_request(settings.clone(), prompt, stop_tokens, http_client.clone()) - .await?; - Ok((response.response, response.created_at)) - } - _ => { - let request = RawCompletionRequest { - model: settings.model.clone(), - prompt, - max_tokens: Some(max_tokens), - temperature: None, - stop: stop_tokens - .into_iter() - .map(std::borrow::Cow::Owned) - .collect(), - environment: None, - }; - - let request_body = serde_json::to_string(&request)?; - let http_request = http_client::Request::builder() - .method(http_client::Method::POST) - .uri(settings.api_url.as_ref()) - .header("Content-Type", "application/json") - .body(http_client::AsyncBody::from(request_body))?; - - let mut response = 
http_client.send(http_request).await?; - let status = response.status(); - - if !status.is_success() { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - anyhow::bail!("custom server error: {} - {}", status, body); - } - - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - let parsed: RawCompletionResponse = - serde_json::from_str(&body).context("Failed to parse completion response")?; - let text = parsed - .choices - .into_iter() - .next() - .map(|choice| choice.text) - .unwrap_or_default(); - Ok((text, parsed.id)) - } - } + (full_context_offset_range, prompt_input) } pub(crate) fn edit_prediction_accepted( @@ -457,9 +480,15 @@ pub(crate) fn edit_prediction_accepted( } let request_id = current_prediction.prediction.id.to_string(); + let model_version = current_prediction.prediction.model_version; let require_auth = custom_accept_url.is_none(); let client = store.client.clone(); let llm_token = store.llm_token.clone(); + let organization_id = store + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); let app_version = AppVersion::global(cx); cx.background_spawn(async move { @@ -476,6 +505,7 @@ pub(crate) fn edit_prediction_accepted( let req = builder.uri(url.as_ref()).body( serde_json::to_string(&AcceptEditPredictionBody { request_id: request_id.clone(), + model_version: model_version.clone(), })? 
.into(), ); @@ -483,6 +513,7 @@ pub(crate) fn edit_prediction_accepted( }, client, llm_token, + organization_id, app_version, require_auth, ) diff --git a/crates/edit_prediction_cli/evals/.zed/settings.json b/crates/edit_prediction_cli/evals/.zed/settings.json index f1e74a3aee3b9cd6bb41ec3a87a30c7ad016e379..708c4b864dca9145718fb0b9f6e5457ec705c60b 100644 --- a/crates/edit_prediction_cli/evals/.zed/settings.json +++ b/crates/edit_prediction_cli/evals/.zed/settings.json @@ -1,3 +1,4 @@ { "remove_trailing_whitespace_on_save": false, + "soft_wrap": "none", } diff --git a/crates/edit_prediction_cli/evals/vscode--add-async-and-await.md b/crates/edit_prediction_cli/evals/vscode--add-async-and-await.md new file mode 100644 index 0000000000000000000000000000000000000000..ee070bdb9d7ca98fcf65febac6cbae2e62e530ff --- /dev/null +++ b/crates/edit_prediction_cli/evals/vscode--add-async-and-await.md @@ -0,0 +1,88 @@ ++++ +repository_url = "https://github.com/microsoft/vscode" +revision = "29e6da6efa2287aaa981635a475d425ff4fd5d5c" ++++ + +## Edit History + +```diff +--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts ++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts +@@ -304,8 +304,8 @@ CommandsRegistry.registerCommand({ + + CommandsRegistry.registerCommand({ + id: REVERSE_CONTINUE_ID, +- handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { +- getThreadAndRun(accessor, context, thread => thread.reverseContinue()); ++ handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { ++ await getThreadAndRun(accessor, context, thread => thread.reverseContinue()); + } + }); +--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts ++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts +@@ -311,11 +311,11 @@ CommandsRegistry.registerCommand({ + + CommandsRegistry.registerCommand({ + id: STEP_BACK_ID, +- handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | 
unknown) => { ++ handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { + const contextKeyService = accessor.get(IContextKeyService); + if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) { +- getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack('instruction')); ++ await getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack('instruction')); + } else { +- getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack()); ++ await getThreadAndRun(accessor, context, (thread: IThread) => thread.stepBack()); + } + } + }); +--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts ++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts +@@ -323,8 +323,8 @@ CommandsRegistry.registerCommand({ + + CommandsRegistry.registerCommand({ + id: TERMINATE_THREAD_ID, +- handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { +- getThreadAndRun(accessor, context, thread => thread.terminate()); ++ handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { ++ await getThreadAndRun(accessor, context, thread => thread.terminate()); + } + }); +``` + +## Cursor Position + +```src/vs/workbench/contrib/debug/browser/debugCommands.ts + weight: KeybindingWeight.WorkbenchContrib, + primary: isWeb ? 
(KeyMod.Alt | KeyCode.F10) : KeyCode.F10, // Browsers do not allow F10 to be binded so we have to bind an alternative + when: CONTEXT_DEBUG_STATE.isEqualTo('stopped'), + handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { + // ^[CURSOR_POSITION] + const contextKeyService = accessor.get(IContextKeyService); + if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) { + getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction')); + } else { +``` + +## Expected Patch + +```diff +--- a/src/vs/workbench/contrib/debug/browser/debugCommands.ts ++++ b/src/vs/workbench/contrib/debug/browser/debugCommands.ts +@@ -467,10 +467,10 @@ KeybindingsRegistry.registerCommandAndKeybindingRule({ + weight: KeybindingWeight.WorkbenchContrib, + primary: isWeb ? (KeyMod.Alt | KeyCode.F10) : KeyCode.F10, // Browsers do not allow F10 to be binded so we have to bind an alternative + when: CONTEXT_DEBUG_STATE.isEqualTo('stopped'), +- handler: (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { ++ handler: async (accessor: ServicesAccessor, _: string, context: CallStackContext | unknown) => { + const contextKeyService = accessor.get(IContextKeyService); + if (CONTEXT_DISASSEMBLY_VIEW_FOCUS.getValue(contextKeyService)) { +- getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction')); ++ await getThreadAndRun(accessor, context, (thread: IThread) => thread.next('instruction')); + } else { +- getThreadAndRun(accessor, context, (thread: IThread) => thread.next()); ++ await getThreadAndRun(accessor, context, (thread: IThread) => thread.next()); + } + } + }); +``` diff --git a/crates/edit_prediction_cli/evals/vscode--add-class-decorator.md b/crates/edit_prediction_cli/evals/vscode--add-class-decorator.md new file mode 100644 index 0000000000000000000000000000000000000000..1fd1feb90e24ac52b05139b7fb2bffebb6ce84d6 --- /dev/null +++ 
b/crates/edit_prediction_cli/evals/vscode--add-class-decorator.md @@ -0,0 +1,74 @@ ++++ +repository_url = "https://github.com/microsoft/vscode" +revision = "6f6e26fcdf0a7ca5084e0da284cd7a5b2d41ae4d" ++++ + +## Edit History + +```diff +--- a/src/vs/workbench/api/common/extHostTypes.ts ++++ b/src/vs/workbench/api/common/extHostTypes.ts +@@ -18,6 +18,14 @@ import { FileSystemProviderErrorCode, markAsFileSystemProviderError } from 'vs/ + import type * as vscode from 'vscode'; + ++function es5ClassCompat(target: Function): any { ++ ///@ts-expect-error ++ function _() { return Reflect.construct(target, arguments, this.constructor); } ++ Object.defineProperty(_, 'name', Object.getOwnPropertyDescriptor(target, 'name')!); ++ Object.setPrototypeOf(_, target); ++ Object.setPrototypeOf(_.prototype, target.prototype); ++ return _; ++} ++ ++@es5ClassCompat + export class Disposable { +--- a/src/vs/workbench/api/common/extHostTypes.ts ++++ b/src/vs/workbench/api/common/extHostTypes.ts +@@ -50,6 +58,7 @@ export class Disposable { + } + } + ++@es5ClassCompat + export class Position { + + static Min(...positions: Position[]): Position { +--- a/src/vs/workbench/api/common/extHostTypes.ts ++++ b/src/vs/workbench/api/common/extHostTypes.ts +@@ -220,6 +229,7 @@ export class Position { + } + } + ++@es5ClassCompat + export class Range { + + static isRange(thing: any): thing is vscode.Range { +``` + +## Cursor Position + +```src/vs/workbench/api/common/extHostTypes.ts + Prepend = 3 +} + +export class TextEdit { +// <[CURSOR_POSITION] + + static isTextEdit(thing: any): thing is TextEdit { + if (thing instanceof TextEdit) { + return true; +``` + +## Expected Patch + +```diff +--- a/src/vs/workbench/api/common/extHostTypes.ts ++++ b/src/vs/workbench/api/common/extHostTypes.ts +@@ -475,6 +485,7 @@ export enum EnvironmentVariableMutatorType { + Prepend = 3 + } + ++@es5ClassCompat + export class TextEdit { + + static isTextEdit(thing: any): thing is TextEdit { +``` diff --git 
a/crates/edit_prediction_cli/evals/vscode--add-interface-method.md b/crates/edit_prediction_cli/evals/vscode--add-interface-method.md new file mode 100644 index 0000000000000000000000000000000000000000..898ebd3bd82bb189baf75527628bb99b7f6345c4 --- /dev/null +++ b/crates/edit_prediction_cli/evals/vscode--add-interface-method.md @@ -0,0 +1,113 @@ ++++ +repository_url = "https://github.com/microsoft/vscode" +revision = "b64eaf598008e2d600a81d846108f72cb37b48e2" ++++ + +## Edit History + +```diff +--- a/src/vs/platform/window/electron-main/window.ts ++++ b/src/vs/platform/window/electron-main/window.ts +@@ -1,49 +1,50 @@ + export interface ICodeWindow extends IDisposable { + + readonly onWillLoad: Event; + readonly onDidSignalReady: Event; ++ readonly onDidTriggerSystemContextMenu: Event<{ x: number; y: number }>; + readonly onDidClose: Event; + readonly onDidDestroy: Event; + + readonly whenClosedOrLoaded: Promise; +--- a/src/vs/platform/windows/electron-main/window.ts ++++ b/src/vs/platform/windows/electron-main/window.ts +@@ -63,60 +63,63 @@ const enum ReadyState { + export class CodeWindow extends Disposable implements ICodeWindow { + + //#region Events + + private readonly _onWillLoad = this._register(new Emitter()); + readonly onWillLoad = this._onWillLoad.event; + + private readonly _onDidSignalReady = this._register(new Emitter()); + readonly onDidSignalReady = this._onDidSignalReady.event; + ++ private readonly _onDidTriggerSystemContextMenu = this._register(new Emitter<{ x: number; y: number }>()); ++ readonly onDidTriggerSystemContextMenu = this._onDidTriggerSystemContextMenu.event; ++ + private readonly _onDidClose = this._register(new Emitter()); + readonly onDidClose = this._onDidClose.event; + + private readonly _onDidDestroy = this._register(new Emitter()); + readonly onDidDestroy = this._onDidDestroy.event; + + //#endregion +--- a/src/vs/platform/windows/electron-main/windows.ts ++++ b/src/vs/platform/windows/electron-main/windows.ts +@@ -1,54 +1,55 @@ 
+ export interface IWindowsMainService { + + readonly _serviceBrand: undefined; + + readonly onDidChangeWindowsCount: Event; + + readonly onDidOpenWindow: Event; + readonly onDidSignalReadyWindow: Event; ++ readonly onDidTriggerSystemContextMenu: Event<{ window: ICodeWindow; x: number; y: number }>; + readonly onDidDestroyWindow: Event; +--- a/src/vs/platform/windows/electron-main/windowsMainService.ts ++++ b/src/vs/platform/windows/electron-main/windowsMainService.ts +@@ -160,60 +160,63 @@ interface ISingleFolderWorkspacePathToOpen extends IPathToOpen { + export class WindowsMainService extends Disposable implements IWindowsMainService { + + declare readonly _serviceBrand: undefined; + + private static readonly WINDOWS: ICodeWindow[] = []; + + private readonly _onDidOpenWindow = this._register(new Emitter()); + readonly onDidOpenWindow = this._onDidOpenWindow.event; + + private readonly _onDidSignalReadyWindow = this._register(new Emitter()); + readonly onDidSignalReadyWindow = this._onDidSignalReadyWindow.event; + + private readonly _onDidDestroyWindow = this._register(new Emitter()); + readonly onDidDestroyWindow = this._onDidDestroyWindow.event; + + private readonly _onDidChangeWindowsCount = this._register(new Emitter()); + readonly onDidChangeWindowsCount = this._onDidChangeWindowsCount.event; + ++ private readonly _onDidTriggerSystemContextMenu = this._register(new Emitter<{ window: ICodeWindow; x: number; y: number }>()); ++ readonly onDidTriggerSystemContextMenu = this._onDidTriggerSystemContextMenu.event; ++ + private readonly windowsStateHandler = this._register(new WindowsStateHandler(this, this.stateMainService, this.lifecycleMainService, this.logService, this.configurationService)); +``` + +## Cursor Position + +```src/vs/platform/windows/test/electron-main/windowsFinder.test.ts + function createTestCodeWindow(options: { lastFocusTime: number; openedFolderUri?: URI; openedWorkspace?: IWorkspaceIdentifier }): ICodeWindow { + return new class implements 
ICodeWindow { + onWillLoad: Event = Event.None; + onDidSignalReady: Event = Event.None; + // <[CURSOR_POSITION] + onDidClose: Event = Event.None; + onDidDestroy: Event = Event.None; + whenClosedOrLoaded: Promise = Promise.resolve(); + id: number = -1; +``` + +## Expected Patch + +```diff +--- a/src/vs/platform/windows/test/electron-main/windowsFinder.test.ts ++++ b/src/vs/platform/windows/test/electron-main/windowsFinder.test.ts +@@ -7,60 +7,61 @@ import * as assert from 'assert'; + function createTestCodeWindow(options: { lastFocusTime: number; openedFolderUri?: URI; openedWorkspace?: IWorkspaceIdentifier }): ICodeWindow { + return new class implements ICodeWindow { + onWillLoad: Event = Event.None; ++ onDidTriggerSystemContextMenu: Event<{ x: number; y: number }> = Event.None; + onDidSignalReady: Event = Event.None; + onDidClose: Event = Event.None; + onDidDestroy: Event = Event.None; + whenClosedOrLoaded: Promise = Promise.resolve(); + id: number = -1; +``` diff --git a/crates/edit_prediction_cli/evals/vscode--log-object-property.md b/crates/edit_prediction_cli/evals/vscode--log-object-property.md new file mode 100644 index 0000000000000000000000000000000000000000..1c60b84f0107c54ea8bd89084dccbfdf785fb932 --- /dev/null +++ b/crates/edit_prediction_cli/evals/vscode--log-object-property.md @@ -0,0 +1,56 @@ ++++ +repository_url = "https://github.com/microsoft/vscode" +revision = "e28a92fc1fbe9de11eca2f8ad19899334bff8525" ++++ + +This prediction requires the model to see the `IDiffComputationResult` type definition. 
+ +## Edit History + +```diff +--- a/src/vs/editor/browser/widget/diffEditorWidget.ts ++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts +@@ -1117,6 +1117,7 @@ + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); ++ console.log("did quit:") + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); +``` + +## Cursor Position + +```src/vs/editor/browser/widget/diffEditorWidget.ts + if (currentToken === this._diffComputationToken + && currentOriginalModel === this._originalEditor.getModel() + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); + console.log("did quit:") + // ^[CURSOR_POSITION] + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); + } +``` + +## Expected Patch + +```diff +--- a/src/vs/editor/browser/widget/diffEditorWidget.ts ++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts +@@ -1115,10 +1115,10 @@ + if (currentToken === this._diffComputationToken + && currentOriginalModel === this._originalEditor.getModel() + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); +- console.log("did quit:") ++ console.log("did quit:", result.quitEarly) + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); + } +``` diff --git a/crates/edit_prediction_cli/evals/zed--add-eprintln.md b/crates/edit_prediction_cli/evals/zed--add-eprintln.md index d4252810b5f97df0991de3015c19e12138e8a27b..467bfd5151996bc98d00145bfebef62f89c5e37e 100644 --- a/crates/edit_prediction_cli/evals/zed--add-eprintln.md +++ b/crates/edit_prediction_cli/evals/zed--add-eprintln.md @@ -1,43 +1,37 @@ +++ repository_url = "git@github.com:zed-industries/zed" -revision = 
"780a87dd98f26816876d12e2728933b17faca78d" +revision = "b7090c9fae7390a82021b994994c0f587744d96c" +++ +This example shows the model's preference for making conservative predictions, and ability to place +the cursor within the predicted output. + ## Edit History ```diff --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs -@@ -206,6 +206,7 @@ - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - +@@ -144,7 +144,7 @@ + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { + epr - cx.notify(); - } - + let next_index = self + .ep_store + .read(cx) ``` ## Cursor Position ```crates/edit_prediction_ui/src/rate_prediction_modal.rs - let current_completion = self - .active_prediction - .as_ref() - .map(|completion| completion.prediction.clone()); - self.select_completion(current_completion, false, window, cx); - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { epr // ^[CURSOR_POSITION] - cx.notify(); - } - - pub fn thumbs_down_active( - &mut self, - _: &ThumbsDownActivePrediction, - window: &mut Window, + let next_index = self + .ep_store + .read(cx) + .shown_predictions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item ``` ## Expected Patch @@ -45,12 +39,16 @@ revision = "780a87dd98f26816876d12e2728933b17faca78d" ```diff --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs -@@ -201,16 +201,16 @@ - self.confirm(&Default::default(), window, cx); - +@@ -144,14 +144,14 @@ + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { - epr + eprintln!(""); # ^[CURSOR_POSITION] - cx.notify(); - } + let next_index = self + .ep_store + .read(cx) + 
.shown_predictions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item ``` diff --git a/crates/edit_prediction_cli/evals/zed--change-match-arm.md b/crates/edit_prediction_cli/evals/zed--change-match-arm.md new file mode 100644 index 0000000000000000000000000000000000000000..042e2730cc352d9c90739a3fe3ea20438755896b --- /dev/null +++ b/crates/edit_prediction_cli/evals/zed--change-match-arm.md @@ -0,0 +1,68 @@ ++++ +repository_url = "git@github.com:zed-industries/zed" +revision = "be5763632dccb33470ca233c36ccd9e5e790e3b2" ++++ + +This prediction requires the model to see the `project::Event` enum. + +## Edit History + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1035,7 +1035,7 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::DiagnosticsUpdated { .. } => { ++ project::Event::Disk { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +## Cursor Position + +```crates/edit_prediction/src/edit_prediction.rs + { + project_state.recent_paths.remove(ix); + } + project_state.recent_paths.push_front(path); + } + } + project::Event::Disk { .. } => { + // ^[CURSOR_POSITION] + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +## Expected Patch + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1032,10 +1032,10 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::Disk { .. } => { ++ project::Event::DiskBasedDiagnosticsFinished { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1032,10 +1032,10 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::Disk { .. 
} => { ++ project::Event::DiskBasedDiagnosticsStarted { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` diff --git a/crates/edit_prediction_cli/src/anthropic_client.rs b/crates/edit_prediction_cli/src/anthropic_client.rs index 784fa711b0058e3d2884460f6ca6f5300fc44a9a..869635c53a15e5c3f6cdaca7632a3e99f0b0bec1 100644 --- a/crates/edit_prediction_cli/src/anthropic_client.rs +++ b/crates/edit_prediction_cli/src/anthropic_client.rs @@ -50,6 +50,7 @@ impl PlainLlmClient { metadata: None, output_config: None, stop_sequences: Vec::new(), + speed: None, temperature: None, top_k: None, top_p: None, @@ -89,6 +90,7 @@ impl PlainLlmClient { metadata: None, output_config: None, stop_sequences: Vec::new(), + speed: None, temperature: None, top_k: None, top_p: None, @@ -578,6 +580,7 @@ impl BatchingLlmClient { temperature: None, top_k: None, top_p: None, + speed: None, }; let custom_id = format!("req_hash_{}", hash); diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index d8fd613ee8d6e1323c8ca0521ca67c837e9fb225..f36eaf2799166d6fbd2b7b212003a1a0644b82c4 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -9,10 +9,11 @@ use anyhow::{Context as _, Result, anyhow}; use edit_prediction::udiff; use gpui::AsyncApp; use similar::DiffableStr; +use std::ops::Range; use std::sync::Arc; -use std::{fmt::Write as _, ops::Range}; use zeta_prompt::{ - ZetaFormat, excerpt_range_for_format, format_zeta_prompt, resolve_cursor_region, + ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt, + output_end_marker_for_format, resolve_cursor_region, }; pub async fn run_format_prompt( @@ -36,12 +37,8 @@ pub async fn run_format_prompt( step_progress.set_substatus("formatting teacher prompt"); let zeta_format = ZetaFormat::default(); - let excerpt_ranges = prompt_inputs - .excerpt_ranges - .as_ref() - 
.context("prompt_inputs must have excerpt_ranges")?; let (editable_range, context_range) = - excerpt_range_for_format(zeta_format, excerpt_ranges); + excerpt_range_for_format(zeta_format, &prompt_inputs.excerpt_ranges); let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { @@ -57,18 +54,22 @@ pub async fn run_format_prompt( let prompt = format_zeta_prompt(prompt_inputs, zeta_format); let prefill = zeta_prompt::get_prefill(prompt_inputs, zeta_format); - let (expected_patch, expected_cursor_offset) = example + let expected_output = example .spec .expected_patches_with_cursor_positions() .into_iter() .next() - .context("expected patches is empty")?; - let expected_output = zeta2_output_for_patch( - prompt_inputs, - &expected_patch, - expected_cursor_offset, - zeta_format, - )?; + .and_then(|(expected_patch, expected_cursor_offset)| { + zeta2_output_for_patch( + prompt_inputs, + &expected_patch, + expected_cursor_offset, + zeta_format, + ) + .ok() + }) + .unwrap_or_default(); + let rejected_output = example.spec.rejected_patch.as_ref().and_then(|patch| { zeta2_output_for_patch(prompt_inputs, patch, None, zeta_format).ok() }); @@ -101,6 +102,12 @@ pub fn zeta2_output_for_patch( old_editable_region.push('\n'); } + if let Some(encoded_output) = + encode_patch_as_output_for_format(version, &old_editable_region, patch, cursor_offset)? 
+ { + return Ok(encoded_output); + } + let (mut result, first_hunk_offset) = udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable_region).with_context( || { @@ -120,16 +127,11 @@ pub fn zeta2_output_for_patch( result.insert_str(offset, zeta_prompt::CURSOR_MARKER); } - match version { - ZetaFormat::V0120GitMergeMarkers - | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211SeedCoder => { - if !result.ends_with('\n') { - result.push('\n'); - } - result.push_str(zeta_prompt::v0120_git_merge_markers::END_MARKER); + if let Some(end_marker) = output_end_marker_for_format(version) { + if !result.ends_with('\n') { + result.push('\n'); } - _ => (), + result.push_str(end_marker); } Ok(result) @@ -258,7 +260,6 @@ impl TeacherPrompt { pub fn format_context(example: &Example) -> String { let related_files = example.prompt_inputs.as_ref().map(|pi| &pi.related_files); - let Some(related_files) = related_files else { return "(No context)".to_string(); }; @@ -267,27 +268,10 @@ impl TeacherPrompt { return "(No context)".to_string(); } - let mut prompt = String::new(); - for file in related_files { - let path_str = file.path.to_string_lossy(); - writeln!(&mut prompt, "`````{path_str}").ok(); - - let mut prev_row = 0; - for excerpt in &file.excerpts { - if excerpt.row_range.start > prev_row { - prompt.push_str("…\n"); - } - prompt.push_str(&excerpt.text); - prompt.push('\n'); - prev_row = excerpt.row_range.end; - } - if prev_row < file.max_row { - prompt.push_str("…\n"); - } - prompt.push_str("\n`````\n"); - } - - prompt + let prefix = "`````"; + let suffix = "`````\n\n"; + let max_tokens = 1024; + zeta_prompt::format_related_files_within_budget(related_files, &prefix, &suffix, max_tokens) } fn format_cursor_excerpt( diff --git a/crates/edit_prediction_cli/src/git.rs b/crates/edit_prediction_cli/src/git.rs index dea6637d4330b671e4b59c436a933450a762328c..59ce3aba40eb162313035cbfe1c9356488ba23ed 100644 --- a/crates/edit_prediction_cli/src/git.rs +++ 
b/crates/edit_prediction_cli/src/git.rs @@ -91,7 +91,7 @@ pub async fn ensure_repo_cloned(repo_url: &str) -> Result { } // Always fetch to get latest commits - run_git(&repo_path, &["fetch", "origin"]).await?; + run_git(&repo_path, &["fetch", "--depth", "1000", "origin"]).await?; // Check if we have a valid HEAD, if not checkout FETCH_HEAD let has_head = run_git(&repo_path, &["rev-parse", "HEAD"]).await.is_ok(); diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs index 680af6f0168c766c6066a91a8f57fe4573b46403..df458770519be5accd72f33a56893bb13c9b88a9 100644 --- a/crates/edit_prediction_cli/src/load_project.rs +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -93,21 +93,19 @@ pub async fn run_load_project( let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start; let excerpt_start_row = Some(full_context_point_range.start.row); - let editable_range_in_excerpt = excerpt_ranges.editable_350.clone(); - ( ZetaPromptInput { cursor_path: example.spec.cursor_path.clone(), cursor_excerpt, - editable_range_in_excerpt, cursor_offset_in_excerpt, excerpt_start_row, events, related_files: existing_related_files, - excerpt_ranges: Some(excerpt_ranges), - preferred_model: None, + excerpt_ranges, in_open_source_repo: false, can_collect_data: false, + experiment: None, + repo_url: None, }, language_name, ) diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 03f94a4dc47388c9a56169f2be0280af33dc6f1d..8bb4b2a8e2f50d448fc314a70e2fc94cfa2c3d71 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -39,6 +39,7 @@ use zeta_prompt::ZetaFormat; use reqwest_client::ReqwestClient; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::env; use std::fmt::Display; use std::fs::{File, OpenOptions}; use std::hash::{Hash, Hasher}; @@ -54,6 +55,7 @@ use crate::load_project::run_load_project; use 
crate::paths::{FAILED_EXAMPLES_DIR, RUN_DIR}; use crate::predict::run_prediction; use crate::progress::Progress; +use crate::pull_examples::{fetch_settled_examples_after, parse_settled_after_input}; use crate::retrieve_context::run_context_retrieval; use crate::score::run_scoring; use crate::split_commit::SplitCommitArgs; @@ -131,6 +133,10 @@ Inputs can be file paths or special specifiers: Fetch rejected edit predictions from Snowflake after the given RFC3339 timestamp. These are predictions that were shown to users but rejected (useful for DPO training). + settled-after:{timestamp} + Fetch settled stream examples from Snowflake after the given RFC3339 timestamp. + These are examples from the edit prediction settled stream. + rated-after:{timestamp} Fetch user-rated edit predictions from Snowflake after the given RFC3339 timestamp. These are predictions that users explicitly rated as positive or negative via the @@ -165,6 +171,9 @@ Examples: # Read user-rated predictions ep read rated-after:2025-01-01T00:00:00Z -o rated.jsonl + # Read settled stream examples + ep read settled-after:2025-01-01T00:00:00Z -o settled.jsonl + # Read only positively rated predictions ep read rated-positive-after:2025-01-01T00:00:00Z -o positive.jsonl @@ -294,6 +303,9 @@ struct EvalArgs { /// Path to write summary scores as JSON #[clap(long)] summary_json: Option, + /// Print all individual example lines (default: up to 20) + #[clap(long)] + verbose: bool, } #[derive(Clone, Copy, Default, Debug, PartialEq, Eq, Hash)] @@ -346,6 +358,7 @@ enum PredictionProvider { Mercury, Zeta1, Zeta2(ZetaFormat), + Baseten(ZetaFormat), Teacher(TeacherBackend), TeacherNonBatching(TeacherBackend), Repair, @@ -364,6 +377,7 @@ impl std::fmt::Display for PredictionProvider { PredictionProvider::Mercury => write!(f, "mercury"), PredictionProvider::Zeta1 => write!(f, "zeta1"), PredictionProvider::Zeta2(format) => write!(f, "zeta2:{format}"), + PredictionProvider::Baseten(format) => write!(f, "baseten:{format}"), 
PredictionProvider::Teacher(backend) => write!(f, "teacher:{backend}"), PredictionProvider::TeacherNonBatching(backend) => { write!(f, "teacher-non-batching:{backend}") @@ -403,6 +417,13 @@ impl std::str::FromStr for PredictionProvider { Ok(PredictionProvider::TeacherNonBatching(backend)) } "repair" => Ok(PredictionProvider::Repair), + "baseten" => { + let format = arg + .map(ZetaFormat::parse) + .transpose()? + .unwrap_or(ZetaFormat::default()); + Ok(PredictionProvider::Baseten(format)) + } _ => { anyhow::bail!( "unknown provider `{provider}`. Valid options: sweep, mercury, zeta1, zeta2, zeta2:, teacher, teacher:, teacher-non-batching, repair\n\ @@ -631,6 +652,7 @@ async fn load_examples( let mut captured_after_timestamps = Vec::new(); let mut rejected_after_timestamps = Vec::new(); let mut requested_after_timestamps = Vec::new(); + let mut settled_after_timestamps = Vec::new(); let mut rated_after_inputs: Vec<(String, Option)> = Vec::new(); let mut file_inputs = Vec::new(); @@ -647,6 +669,8 @@ async fn load_examples( pull_examples::parse_requested_after_input(input_string.as_ref()) { requested_after_timestamps.push(timestamp.to_string()); + } else if let Some(timestamp) = parse_settled_after_input(input_string.as_ref()) { + settled_after_timestamps.push(timestamp.to_string()); } else if let Some((timestamp, rating_filter)) = pull_examples::parse_rated_after_input(input_string.as_ref()) { @@ -714,6 +738,21 @@ async fn load_examples( examples.append(&mut requested_examples); } + if !settled_after_timestamps.is_empty() { + settled_after_timestamps.sort(); + + let mut settled_examples = fetch_settled_examples_after( + http_client.clone(), + &settled_after_timestamps, + max_rows_per_timestamp, + remaining_offset, + background_executor.clone(), + Some(MIN_CAPTURE_VERSION), + ) + .await?; + examples.append(&mut settled_examples); + } + if !rated_after_inputs.is_empty() { rated_after_inputs.sort(); @@ -897,8 +936,18 @@ fn main() { } Command::Synthesize(synth_args) => { - 
let Some(output_dir) = args.output else { - panic!("output dir is required"); + let output_dir = if let Some(output_dir) = args.output { + output_dir + } else { + let default_output_dir = env::current_dir() + .unwrap() + .join("crates/edit_prediction_cli/evals-generated"); + if default_output_dir.parent().unwrap().exists() { + std::fs::create_dir(&default_output_dir).ok(); + default_output_dir + } else { + panic!("output dir is required"); + } }; let config = SynthesizeConfig { repo_urls: synth_args.repos.clone(), @@ -1238,7 +1287,7 @@ fn main() { match &command { Command::Eval(args) => { let examples = finished_examples.lock().unwrap(); - score::print_report(&examples); + score::print_report(&examples, args.verbose); if let Some(summary_path) = &args.summary_json { score::write_summary_json(&examples, summary_path)?; } diff --git a/crates/edit_prediction_cli/src/metrics.rs b/crates/edit_prediction_cli/src/metrics.rs index fc870c36c9c62f4d74486ddd4b2d35176b00bb5c..1bfd8e542fa3d74b55f091d2ac13aa22883f6a2f 100644 --- a/crates/edit_prediction_cli/src/metrics.rs +++ b/crates/edit_prediction_cli/src/metrics.rs @@ -76,14 +76,21 @@ impl ClassificationMetrics { } enum ChrfWhitespace { + /// Preserve whitespace as-is #[allow(unused)] Unchanged, + + /// Ignore all whitespace differences + #[allow(unused)] Ignore, + + /// Collapse whitespace into single spaces + Collapse, } const CHR_F_CHAR_ORDER: usize = 6; const CHR_F_BETA: f64 = 2.0; -const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Ignore; +const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Collapse; /// Computes a delta-chrF score that compares two sets of edits. /// @@ -196,9 +203,34 @@ fn filter_whitespace_chars(text: &str) -> Vec { match CHR_F_WHITESPACE { ChrfWhitespace::Unchanged => text.chars().collect(), ChrfWhitespace::Ignore => text.chars().filter(|c| !c.is_whitespace()).collect(), + ChrfWhitespace::Collapse => collapse_whitespace(text.chars()), } } +/// Collapse whitespace into single spaces. 
+/// Newlines and spaces are collapsed separately. +fn collapse_whitespace(chars: impl Iterator) -> Vec { + let mut result = Vec::new(); + let mut last_whitespace = None; + for c in chars { + if c.is_whitespace() && c != '\n' { + if last_whitespace != Some(' ') { + result.push(' '); + last_whitespace = Some(' '); + } + } else if c == '\n' { + if last_whitespace != Some('\n') { + result.push(c); + last_whitespace = Some('\n'); + } + } else { + result.push(c); + last_whitespace = None; + } + } + result +} + /// Extract only the changed regions between two texts, with context for n-gram boundaries. /// /// Returns (original_affected_region, modified_affected_region) as Vec. @@ -269,15 +301,15 @@ fn count_ngrams_from_chars(chars: &[char], n: usize) -> Counts { #[allow(dead_code)] fn chr_f_ngram_counts(text: &str) -> Vec { - // Ignore whitespace. The original chrF implementation skips all - // whitespace. We should consider compressing multiple consecutive - // spaces into one -- this may reflect our task more closely. 
let text = match CHR_F_WHITESPACE { ChrfWhitespace::Unchanged => text.to_string(), ChrfWhitespace::Ignore => text .chars() .filter(|c| !c.is_whitespace()) .collect::(), + ChrfWhitespace::Collapse => collapse_whitespace(text.chars()) + .into_iter() + .collect::(), }; (1..=CHR_F_CHAR_ORDER) @@ -1175,4 +1207,14 @@ index abc123..def456 100644 assert!(counts.deleted_tokens >= 2); assert!(counts.inserted_tokens >= 2); } + + #[test] + fn test_whitespace_collapse() { + let text = "abc \n\n\n 123"; + let collapsed = collapse_whitespace(text.chars()); + assert_eq!( + collapsed, + vec!['a', 'b', 'c', ' ', '\n', ' ', '1', '2', '3'] + ); + } } diff --git a/crates/edit_prediction_cli/src/parse_output.rs b/crates/edit_prediction_cli/src/parse_output.rs index 4b8af44785c1781de772f569c012ee64eee48aad..2c066b8b32b3eaab54ad6e3b3bcb0796ff27f950 100644 --- a/crates/edit_prediction_cli/src/parse_output.rs +++ b/crates/edit_prediction_cli/src/parse_output.rs @@ -6,7 +6,11 @@ use crate::{ }; use anyhow::{Context as _, Result}; use edit_prediction::example_spec::encode_cursor_in_patch; -use zeta_prompt::{CURSOR_MARKER, ZetaFormat}; +use zeta_prompt::{ + CURSOR_MARKER, ZetaFormat, clean_extracted_region_for_format, + current_region_markers_for_format, output_end_marker_for_format, + output_with_context_for_format, +}; pub fn run_parse_output(example: &mut Example) -> Result<()> { example @@ -51,22 +55,7 @@ pub fn parse_prediction_output( } fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result { - let (current_marker, end_marker) = match format { - ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"), - ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => { - ("<|fim_middle|>current\n", "<|fim_suffix|>") - } - ZetaFormat::V0120GitMergeMarkers - | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211Prefill => ( - zeta_prompt::v0120_git_merge_markers::START_MARKER, - zeta_prompt::v0120_git_merge_markers::SEPARATOR, - ), 
- ZetaFormat::V0211SeedCoder => ( - zeta_prompt::seed_coder::START_MARKER, - zeta_prompt::seed_coder::SEPARATOR, - ), - }; + let (current_marker, end_marker) = current_region_markers_for_format(format); let start = prompt.find(current_marker).with_context(|| { format!( @@ -82,8 +71,7 @@ fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result { - zeta_prompt::v0131_git_merge_markers_prefix::END_MARKER - } - ZetaFormat::V0120GitMergeMarkers => zeta_prompt::v0120_git_merge_markers::END_MARKER, - ZetaFormat::V0112MiddleAtEnd - | ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion => "", - ZetaFormat::V0211SeedCoder => zeta_prompt::seed_coder::END_MARKER, - }; - if !suffix.is_empty() { + if let Some(marker) = output_end_marker_for_format(format) { new_text = new_text - .strip_suffix(suffix) + .strip_suffix(marker) .unwrap_or(&new_text) .to_string(); } diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index e02fcbdb425a62fb478b8be36fdd034eede27622..94e28d00da2d61f63b59364304c3b9b4276e15f7 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -6,14 +6,18 @@ use crate::{ headless::EpAppState, load_project::run_load_project, openai_client::OpenAiClient, + parse_output::parse_prediction_output, paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR}, - progress::{ExampleProgress, InfoStyle, Step}, + progress::{ExampleProgress, InfoStyle, Step, StepProgress}, retrieve_context::run_context_retrieval, }; use anyhow::Context as _; +use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse}; use edit_prediction::{DebugEvent, EditPredictionStore, Zeta2RawConfig}; -use futures::{FutureExt as _, StreamExt as _, future::Shared}; +use futures::{AsyncReadExt as _, FutureExt as _, StreamExt as _, future::Shared}; use gpui::{AppContext as _, AsyncApp, Task}; +use http_client::{AsyncBody, HttpClient, Method}; +use reqwest_client::ReqwestClient; 
use std::{ fs, sync::{ @@ -79,6 +83,22 @@ pub async fn run_prediction( .await; } + if let PredictionProvider::Baseten(format) = provider { + run_format_prompt( + example, + &FormatPromptArgs { + provider: PredictionProvider::Zeta2(format), + }, + app_state.clone(), + example_progress, + cx, + ) + .await?; + + let step_progress = example_progress.start(Step::Predict); + return predict_baseten(example, format, &step_progress).await; + } + run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?; run_context_retrieval(example, app_state.clone(), example_progress, cx.clone()).await?; @@ -110,13 +130,14 @@ pub async fn run_prediction( ep_store.update(&mut cx, |store, _cx| { let model = match provider { - PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta1, - PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta2, + PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta, + PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta, PredictionProvider::Sweep => edit_prediction::EditPredictionModel::Sweep, PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury, PredictionProvider::Teacher(..) | PredictionProvider::TeacherNonBatching(..) - | PredictionProvider::Repair => { + | PredictionProvider::Repair + | PredictionProvider::Baseten(_) => { unreachable!() } }; @@ -127,7 +148,12 @@ pub async fn run_prediction( if let PredictionProvider::Zeta2(format) = provider { if format != ZetaFormat::default() { let model_id = std::env::var("ZED_ZETA_MODEL").ok(); - store.set_zeta2_raw_config(Zeta2RawConfig { model_id, format }); + let environment = std::env::var("ZED_ZETA_ENVIRONMENT").ok(); + store.set_zeta2_raw_config(Zeta2RawConfig { + model_id, + environment, + format, + }); } } }); @@ -364,7 +390,7 @@ async fn predict_anthropic( .await? 
else { // Request stashed for batched processing - return Ok(()); + continue; }; let actual_output = response @@ -438,7 +464,7 @@ async fn predict_openai( .await? else { // Request stashed for batched processing - return Ok(()); + continue; }; let actual_output = response @@ -480,6 +506,89 @@ async fn predict_openai( Ok(()) } +pub async fn predict_baseten( + example: &mut Example, + format: ZetaFormat, + step_progress: &StepProgress, +) -> anyhow::Result<()> { + let model_id = + std::env::var("ZED_ZETA_MODEL").context("ZED_ZETA_MODEL environment variable required")?; + + let api_key = + std::env::var("BASETEN_API_KEY").context("BASETEN_API_KEY environment variable not set")?; + + let prompt = example.prompt.as_ref().context("Prompt is required")?; + let prompt_text = prompt.input.clone(); + let prefill = prompt.prefill.clone().unwrap_or_default(); + + step_progress.set_substatus("running prediction via baseten"); + + let environment: String = <&'static str>::from(&format).to_lowercase(); + let url = format!( + "https://model-{model_id}.api.baseten.co/environments/{environment}/sync/v1/completions" + ); + + let request_body = RawCompletionRequest { + model: model_id, + prompt: prompt_text.clone(), + max_tokens: Some(2048), + temperature: Some(0.), + stop: vec![], + environment: None, + }; + + let body_bytes = + serde_json::to_vec(&request_body).context("Failed to serialize request body")?; + + let http_client: Arc = Arc::new(ReqwestClient::new()); + let request = http_client::Request::builder() + .method(Method::POST) + .uri(&url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Api-Key {api_key}")) + .body(AsyncBody::from(body_bytes))?; + + let mut response = http_client.send(request).await?; + let status = response.status(); + + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .context("Failed to read Baseten response body")?; + + if !status.is_success() { + anyhow::bail!("Baseten API 
returned {status}: {body}"); + } + + let completion: RawCompletionResponse = + serde_json::from_str(&body).context("Failed to parse Baseten response")?; + + let actual_output = completion + .choices + .into_iter() + .next() + .map(|choice| choice.text) + .unwrap_or_default(); + + let actual_output = format!("{prefill}{actual_output}"); + + let (actual_patch, actual_cursor) = + parse_prediction_output(example, &actual_output, PredictionProvider::Zeta2(format))?; + + let prediction = ExamplePrediction { + actual_patch: Some(actual_patch), + actual_output, + actual_cursor, + error: None, + provider: PredictionProvider::Baseten(format), + }; + + example.predictions.push(prediction); + Ok(()) +} + pub async fn sync_batches(provider: Option<&PredictionProvider>) -> anyhow::Result<()> { match provider { Some(PredictionProvider::Teacher(backend)) => match backend { diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index cacfc9bb679acdcb3c709736c6e4b5e79af861e8..cccd351dcdeda0dbf059d851a44b02bc1e558654 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -5,24 +5,25 @@ use http_client::{AsyncBody, HttpClient, Method, Request}; use indoc::indoc; use serde::Deserialize; use serde_json::{Value as JsonValue, json}; +use std::fmt::Write as _; use std::io::Read; use std::sync::Arc; use std::time::Duration; use telemetry_events::EditPredictionRating; -use zeta_prompt::ZetaPromptInput; +use zeta_prompt::{ZetaFormat, ZetaPromptInput, excerpt_range_for_format}; use crate::example::Example; use crate::progress::{InfoStyle, Progress, Step}; const EDIT_PREDICTION_DEPLOYMENT_EVENT: &str = "Edit Prediction Deployment"; use edit_prediction::example_spec::{ExampleSpec, TelemetrySource}; -use std::fmt::Write as _; pub(crate) const SNOWFLAKE_SUCCESS_CODE: &str = "090001"; pub(crate) const SNOWFLAKE_ASYNC_IN_PROGRESS_CODE: &str = "333334"; const 
PREDICTIVE_EDIT_REQUESTED_EVENT: &str = "Predictive Edit Requested"; const PREDICTIVE_EDIT_REJECTED_EVENT: &str = "Predictive Edit Rejected"; const EDIT_PREDICTION_RATED_EVENT: &str = "Edit Prediction Rated"; +const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled"; /// Minimum Zed version for filtering captured examples. /// For example, `MinCaptureVersion { minor: 224, patch: 1 }` means only pull examples @@ -33,7 +34,8 @@ pub struct MinCaptureVersion { pub patch: u32, } -const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 120; +const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 240; +const SETTLED_STATEMENT_TIMEOUT_SECONDS: u64 = 240; pub(crate) const POLL_INTERVAL: Duration = Duration::from_secs(2); pub(crate) const MAX_POLL_ATTEMPTS: usize = 120; @@ -52,6 +54,11 @@ pub fn parse_requested_after_input(input: &str) -> Option<&str> { input.strip_prefix("requested-after:") } +/// Parse an input token of the form `settled-after:{timestamp}`. +pub fn parse_settled_after_input(input: &str) -> Option<&str> { + input.strip_prefix("settled-after:") +} + /// Parse an input token of the form `rated-after:{timestamp}`, `rated-positive-after:{timestamp}`, /// or `rated-negative-after:{timestamp}`. /// Returns `(timestamp, Option)` where `None` means all ratings. 
@@ -145,6 +152,103 @@ async fn run_sql_with_polling( Ok(response) } +struct SnowflakeConfig { + token: String, + base_url: String, + role: Option, +} + +async fn fetch_examples_with_query( + http_client: Arc, + step_progress: &crate::progress::StepProgress, + background_executor: BackgroundExecutor, + statement: &str, + bindings: JsonValue, + timeout_seconds: u64, + required_columns: &[&str], + parse_response: for<'a> fn( + &'a SnowflakeStatementResponse, + &'a std::collections::HashMap, + ) -> Result + 'a>>, +) -> Result> { + let snowflake = SnowflakeConfig { + token: std::env::var("EP_SNOWFLAKE_API_KEY") + .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?, + base_url: std::env::var("EP_SNOWFLAKE_BASE_URL").context( + "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", + )?, + role: std::env::var("EP_SNOWFLAKE_ROLE").ok(), + }; + let request = json!({ + "statement": statement, + "timeout": timeout_seconds, + "database": "EVENTS", + "schema": "PUBLIC", + "warehouse": "DBT", + "role": snowflake.role.as_deref(), + "bindings": bindings + }); + + let response = run_sql_with_polling( + http_client.clone(), + &snowflake.base_url, + &snowflake.token, + &request, + step_progress, + background_executor, + ) + .await?; + + let total_rows = response + .result_set_meta_data + .as_ref() + .and_then(|meta| meta.num_rows) + .unwrap_or(response.data.len() as i64); + let partition_count = response + .result_set_meta_data + .as_ref() + .map(|meta| meta.partition_info.len()) + .unwrap_or(1) + .max(1); + + step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); + step_progress.set_substatus("parsing"); + + let column_indices = get_column_indices(&response.result_set_meta_data, required_columns); + + let mut parsed_examples = Vec::with_capacity(total_rows as usize); + parsed_examples.extend(parse_response(&response, &column_indices)?); + + if partition_count > 1 { + let statement_handle = 
response + .statement_handle + .as_ref() + .context("response has multiple partitions but no statementHandle")?; + + for partition in 1..partition_count { + step_progress.set_substatus(format!( + "fetching partition {}/{}", + partition + 1, + partition_count + )); + + let partition_response = fetch_partition( + http_client.clone(), + &snowflake.base_url, + &snowflake.token, + statement_handle, + partition, + ) + .await?; + + parsed_examples.extend(parse_response(&partition_response, &column_indices)?); + } + } + + step_progress.set_substatus("done"); + Ok(parsed_examples) +} + pub(crate) async fn fetch_partition( http_client: Arc, base_url: &str, @@ -298,13 +402,6 @@ pub async fn fetch_rejected_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for after_date in after_timestamps.iter() { @@ -312,10 +409,11 @@ pub async fn fetch_rejected_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); - // Join rejected events with their corresponding request events to get the full context. - // We filter for V3 sampling data which contains the structured input we need. - // We also filter for predictions that were actually shown to the user (was_shown = true) - // to focus on explicit user rejections rather than implicit cancellations. 
+ let min_minor_str = min_capture_version.map(|version| version.minor.to_string()); + let min_patch_str = min_capture_version.map(|version| version.patch.to_string()); + let min_minor_str_ref = min_minor_str.as_deref(); + let min_patch_str_ref = min_patch_str.as_deref(); + let statement = indoc! {r#" SELECT req.event_properties:request_id::string AS request_id, @@ -348,58 +446,25 @@ pub async fn fetch_rejected_examples_after( OFFSET ? "#}; - let min_minor_str = min_capture_version.map(|v| v.minor.to_string()); - let min_patch_str = min_capture_version.map(|v| v.patch.to_string()); - let min_minor_str_ref = min_minor_str.as_deref(); - let min_patch_str_ref = min_patch_str.as_deref(); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, - "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT }, - "3": { "type": "TEXT", "value": after_date }, - "4": { "type": "FIXED", "value": min_minor_str_ref }, - "5": { "type": "FIXED", "value": min_minor_str_ref }, - "6": { "type": "FIXED", "value": min_minor_str_ref }, - "7": { "type": "FIXED", "value": min_patch_str_ref }, - "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "9": { "type": "FIXED", "value": offset.to_string() } - } + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT }, + "3": { "type": "TEXT", "value": after_date }, + "4": { "type": "FIXED", "value": min_minor_str_ref }, + "5": { "type": "FIXED", "value": min_minor_str_ref }, + "6": { "type": "FIXED", "value": min_minor_str_ref }, + "7": { "type": "FIXED", "value": min_patch_str_ref }, + "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "9": { "type": "FIXED", "value": offset.to_string() } }); 
- let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, &[ "request_id", "device_id", @@ -411,40 +476,11 @@ pub async fn fetch_rejected_examples_after( "reason", "zed_version", ], - ); - - all_examples.extend(rejected_examples_from_response(&response, &column_indices)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(rejected_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + rejected_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -464,13 +500,6 @@ pub async fn fetch_requested_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable 
EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for after_date in after_timestamps.iter() { @@ -478,6 +507,11 @@ pub async fn fetch_requested_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); + let min_minor_str = min_capture_version.map(|version| version.minor.to_string()); + let min_patch_str = min_capture_version.map(|version| version.patch.to_string()); + let min_minor_str_ref = min_minor_str.as_deref(); + let min_patch_str_ref = min_patch_str.as_deref(); + let statement = indoc! {r#" SELECT req.event_properties:request_id::string AS request_id, @@ -502,95 +536,123 @@ pub async fn fetch_requested_examples_after( OFFSET ? "#}; - let min_minor_str = min_capture_version.map(|v| v.minor.to_string()); - let min_patch_str = min_capture_version.map(|v| v.patch.to_string()); - let min_minor_str_ref = min_minor_str.as_deref(); - let min_patch_str_ref = min_patch_str.as_deref(); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, - "2": { "type": "TEXT", "value": after_date }, - "3": { "type": "FIXED", "value": min_minor_str_ref }, - "4": { "type": "FIXED", "value": min_minor_str_ref }, - "5": { "type": "FIXED", "value": min_minor_str_ref }, - "6": { "type": "FIXED", "value": min_patch_str_ref }, - "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "8": { "type": "FIXED", "value": offset.to_string() } - } + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": after_date }, + "3": { "type": "FIXED", "value": min_minor_str_ref }, + "4": { "type": "FIXED", "value": 
min_minor_str_ref }, + "5": { "type": "FIXED", "value": min_minor_str_ref }, + "6": { "type": "FIXED", "value": min_patch_str_ref }, + "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "8": { "type": "FIXED", "value": offset.to_string() } }); - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, + &["request_id", "device_id", "time", "input", "zed_version"], + requested_examples_from_response, ) .await?; - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); + all_examples.extend(examples); + } - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); + Ok(all_examples) +} - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); +pub async fn fetch_settled_examples_after( + http_client: Arc, + after_timestamps: &[String], + max_rows_per_timestamp: usize, + offset: usize, + background_executor: BackgroundExecutor, + min_capture_version: Option, +) -> Result> { + if after_timestamps.is_empty() { + return Ok(Vec::new()); + } - let column_indices = get_column_indices( - &response.result_set_meta_data, - &["request_id", "device_id", "time", "input", "zed_version"], - ); + let progress = Progress::global(); - all_examples.extend(requested_examples_from_response( - &response, - &column_indices, - )?); + let mut all_examples = Vec::new(); - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - 
let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(requested_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + for after_date in after_timestamps.iter() { + let step_progress_name = format!("settled>{after_date}"); + let step_progress = progress.start(Step::PullExamples, &step_progress_name); + step_progress.set_substatus("querying"); + + let _ = min_capture_version; + + let statement = indoc! {r#" + WITH requested AS ( + SELECT + req.event_properties:request_id::string AS request_id, + req.device_id::string AS device_id, + req.time AS req_time, + req.time::string AS time, + req.event_properties:input AS input, + req.event_properties:format::string AS requested_format, + req.event_properties:output::string AS requested_output, + req.event_properties:zed_version::string AS zed_version + FROM events req + WHERE req.event_type = ? + AND req.event_properties:version = 'V3' + AND req.event_properties:input:can_collect_data = true + AND req.time > TRY_TO_TIMESTAMP_NTZ(?) + ) + SELECT + req.request_id AS request_id, + req.device_id AS device_id, + req.time AS time, + req.input AS input, + req.requested_output AS requested_output, + settled.event_properties:settled_editable_region::string AS settled_editable_region, + req.requested_format AS requested_format, + req.zed_version AS zed_version + FROM requested req + INNER JOIN events settled + ON req.request_id = settled.event_properties:request_id::string + WHERE settled.event_type = ? + ORDER BY req.req_time ASC + LIMIT ? + OFFSET ? 
+ "#}; + + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": after_date }, + "3": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT }, + "4": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "5": { "type": "FIXED", "value": offset.to_string() } + }); + + let examples = fetch_examples_with_query( + http_client.clone(), + &step_progress, + background_executor.clone(), + statement, + bindings, + SETTLED_STATEMENT_TIMEOUT_SECONDS, + &[ + "request_id", + "device_id", + "time", + "input", + "requested_output", + "settled_editable_region", + "requested_format", + "zed_version", + ], + settled_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -610,13 +672,6 @@ pub async fn fetch_rated_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. 
https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for (after_date, rating_filter) in inputs.iter() { @@ -629,7 +684,7 @@ pub async fn fetch_rated_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); - let rating_value = rating_filter.as_ref().map(|r| match r { + let rating_value = rating_filter.as_ref().map(|rating| match rating { EditPredictionRating::Positive => "Positive", EditPredictionRating::Negative => "Negative", }); @@ -660,7 +715,7 @@ pub async fn fetch_rated_examples_after( AND rated.event_properties:inputs IS NOT NULL AND rated.event_properties:inputs:cursor_excerpt IS NOT NULL AND rated.event_properties:output IS NOT NULL - AND rated.event_properties:can_collect_data = true + AND rated.event_properties:inputs:can_collect_data = true ORDER BY rated.time ASC LIMIT ? OFFSET ? @@ -677,44 +732,13 @@ pub async fn fetch_rated_examples_after( "8": { "type": "FIXED", "value": offset.to_string() } }); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": bindings - }); - - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, + statement, + bindings, + 
DEFAULT_STATEMENT_TIMEOUT_SECONDS, &[ "request_id", "inputs", @@ -727,40 +751,11 @@ pub async fn fetch_rated_examples_after( "environment", "zed_version", ], - ); - - all_examples.extend(rated_examples_from_response(&response, &column_indices)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(rated_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + rated_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -769,7 +764,7 @@ pub async fn fetch_rated_examples_after( fn rated_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -828,11 +823,11 @@ fn rated_examples_from_response<'a>( let environment = get_string("environment"); let zed_version = get_string("zed_version"); - match (inputs, output.clone(), rating.clone(), device_id.clone(), time.clone()) { - (Some(inputs), Some(output), Some(rating), Some(device_id), Some(time)) => { + match (inputs, output.clone(), rating.clone(), time.clone()) { + (Some(inputs), Some(output), Some(rating), Some(time)) => { Some(build_rated_example( request_id, - device_id, + device_id.unwrap_or_default(), time, inputs, output, @@ -845,11 +840,10 @@ fn rated_examples_from_response<'a>( } _ => { log::warn!( - "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} device_id={:?} 
time={:?}", + "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} time={:?}", inputs_json.is_some(), output.is_some(), rating.is_some(), - device_id.is_some(), time.is_some(), ); None @@ -857,7 +851,7 @@ fn rated_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) } fn build_rated_example( @@ -917,7 +911,7 @@ fn build_rated_example( fn requested_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -986,13 +980,190 @@ fn requested_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) +} + +fn settled_examples_from_response<'a>( + response: &'a SnowflakeStatementResponse, + column_indices: &'a std::collections::HashMap, +) -> Result + 'a>> { + if let Some(code) = &response.code { + if code != SNOWFLAKE_SUCCESS_CODE { + anyhow::bail!( + "snowflake sql api returned error code={code} message={}", + response.message.as_deref().unwrap_or("") + ); + } + } + + let iter = response + .data + .iter() + .enumerate() + .filter_map(move |(row_index, data_row)| { + let get_value = |name: &str| -> Option { + let index = column_indices.get(name).copied()?; + let value = data_row.get(index)?; + if value.is_null() { + None + } else { + Some(value.clone()) + } + }; + + let get_string = |name: &str| -> Option { + match get_value(name)? 
{ + JsonValue::String(s) => Some(s), + other => Some(other.to_string()), + } + }; + + let parse_json_value = |_: &str, raw: Option<&JsonValue>| -> Option { + let value = raw?; + match value { + JsonValue::String(s) => serde_json::from_str::(s).ok(), + other => Some(other.clone()), + } + }; + + let request_id_str = get_string("request_id"); + let device_id = get_string("device_id"); + let time = get_string("time"); + let input_raw = get_value("input"); + let input_json = parse_json_value("input", input_raw.as_ref()); + let input: Option = input_json + .as_ref() + .and_then(|parsed| serde_json::from_value(parsed.clone()).ok()); + let requested_output = get_string("requested_output"); + let settled_editable_region = get_string("settled_editable_region"); + let requested_format = + get_string("requested_format").and_then(|s| ZetaFormat::parse(&s).ok()); + let zed_version = get_string("zed_version"); + + match ( + request_id_str.clone(), + device_id.clone(), + time.clone(), + input.clone(), + requested_output.clone(), + settled_editable_region.clone(), + requested_format, + ) { + ( + Some(request_id), + Some(device_id), + Some(time), + Some(input), + Some(requested_output), + Some(settled_editable_region), + Some(requested_format), + ) => Some(build_settled_example( + request_id, + device_id, + time, + input, + requested_output, + settled_editable_region, + requested_format, + zed_version, + )), + _ => { + let mut missing_fields = Vec::new(); + + if request_id_str.is_none() { + missing_fields.push("request_id"); + } + if device_id.is_none() { + missing_fields.push("device_id"); + } + if time.is_none() { + missing_fields.push("time"); + } + if input_raw.is_none() || input_json.is_none() || input.is_none() { + missing_fields.push("input"); + } + if requested_output.is_none() { + missing_fields.push("requested_output"); + } + if settled_editable_region.is_none() { + missing_fields.push("settled_editable_region"); + } + if requested_format.is_none() { + 
missing_fields.push("requested_format"); + } + + log::warn!( + "skipping settled row {row_index}: [{}]", + missing_fields.join(", "), + ); + None + } + } + }); + + Ok(Box::new(iter)) +} + +fn build_settled_example( + request_id: String, + device_id: String, + time: String, + input: ZetaPromptInput, + requested_output: String, + settled_editable_region: String, + requested_format: ZetaFormat, + zed_version: Option, +) -> Example { + let requested_editable_range = + excerpt_range_for_format(requested_format, &input.excerpt_ranges).0; + + let base_cursor_excerpt = input.cursor_excerpt.to_string(); + + let requested_range_is_valid = requested_editable_range.start <= requested_editable_range.end + && requested_editable_range.end <= base_cursor_excerpt.len(); + let mut example = build_example_from_snowflake( + request_id.clone(), + device_id, + time, + input, + vec!["settled".to_string()], + None, + zed_version, + ); + + if !requested_range_is_valid { + log::warn!( + "skipping malformed requested range for request {}: requested={:?} (base_len={})", + request_id, + requested_editable_range, + base_cursor_excerpt.len(), + ); + return example; + } + + let settled_replacement = settled_editable_region.as_str(); + let rejected_patch = build_output_patch( + &example.spec.cursor_path, + &base_cursor_excerpt, + &requested_editable_range, + &requested_output, + ); + let expected_patch = build_output_patch( + &example.spec.cursor_path, + &base_cursor_excerpt, + &requested_editable_range, + settled_replacement, + ); + + example.spec.expected_patches = vec![expected_patch]; + example.spec.rejected_patch = Some(rejected_patch); + example } fn rejected_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -1077,7 +1248,7 @@ fn rejected_examples_from_response<'a>( } }); - Ok(iter) + 
Ok(Box::new(iter)) } fn build_rejected_example( @@ -1093,7 +1264,7 @@ fn build_rejected_example( let rejected_patch = build_output_patch( &input.cursor_path, input.cursor_excerpt.as_ref(), - &input.editable_range_in_excerpt, + &input.excerpt_ranges.editable_350, &output, ); let mut example = build_example_from_snowflake( diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs index 18ee3c1b0ec1456b02bb145c98e669b777048385..a5fb00b39a67a15a7afcced897b4d109f1f3406f 100644 --- a/crates/edit_prediction_cli/src/retrieve_context.rs +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -85,46 +85,79 @@ async fn wait_for_language_servers_to_start( ) -> anyhow::Result<()> { let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); - let (language_server_ids, mut starting_language_server_ids) = - buffer.update(cx, |buffer, cx| { - lsp_store.update(cx, |lsp_store, cx| { - let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); - let starting_ids = ids - .iter() - .copied() - .filter(|id| !lsp_store.language_server_statuses.contains_key(&id)) - .collect::>(); - (ids, starting_ids) - }) + // Determine which servers exist for this buffer, and which are still starting. 
+ let mut servers_pending_start = HashSet::default(); + let mut servers_pending_diagnostics = HashSet::default(); + buffer.update(cx, |buffer, cx| { + lsp_store.update(cx, |lsp_store, cx| { + let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); + for &id in &ids { + match lsp_store.language_server_statuses.get(&id) { + None => { + servers_pending_start.insert(id); + servers_pending_diagnostics.insert(id); + } + Some(status) if status.has_pending_diagnostic_updates => { + servers_pending_diagnostics.insert(id); + } + Some(_) => {} + } + } }); + }); - step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len())); + step_progress.set_substatus(format!( + "waiting for {} LSPs", + servers_pending_diagnostics.len() + )); - let timeout_duration = if starting_language_server_ids.is_empty() { + let timeout_duration = if servers_pending_start.is_empty() { Duration::from_secs(30) } else { Duration::from_secs(60 * 5) }; - let timeout = cx.background_executor().timer(timeout_duration).shared(); - let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); - let added_subscription = cx.subscribe(project, { + let (mut started_tx, mut started_rx) = mpsc::channel(servers_pending_start.len().max(1)); + let (mut diag_tx, mut diag_rx) = mpsc::channel(servers_pending_diagnostics.len().max(1)); + let subscriptions = [cx.subscribe(&lsp_store, { let step_progress = step_progress.clone(); - move |_, event, _| match event { - project::Event::LanguageServerAdded(language_server_id, name, _) => { + move |lsp_store, event, cx| match event { + project::LspStoreEvent::LanguageServerAdded(id, name, _) => { step_progress.set_substatus(format!("LSP started: {}", name)); - tx.try_send(*language_server_id).ok(); + started_tx.try_send(*id).ok(); + } + project::LspStoreEvent::DiskBasedDiagnosticsFinished { language_server_id } => { + let name = lsp_store + .read(cx) + .language_server_adapter_for_id(*language_server_id) + .unwrap() + .name(); + 
step_progress.set_substatus(format!("LSP idle: {}", name)); + diag_tx.try_send(*language_server_id).ok(); + } + project::LspStoreEvent::LanguageServerUpdate { + message: + client::proto::update_language_server::Variant::WorkProgress( + client::proto::LspWorkProgress { + message: Some(message), + .. + }, + ), + .. + } => { + step_progress.set_substatus(message.clone()); } _ => {} } - }); + })]; - while !starting_language_server_ids.is_empty() { + // Phase 1: wait for all servers to start. + while !servers_pending_start.is_empty() { futures::select! { - language_server_id = rx.next() => { - if let Some(id) = language_server_id { - starting_language_server_ids.remove(&id); + id = started_rx.next() => { + if let Some(id) = id { + servers_pending_start.remove(&id); } }, _ = timeout.clone().fuse() => { @@ -133,67 +166,17 @@ async fn wait_for_language_servers_to_start( } } - drop(added_subscription); - - let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); - let subscriptions = [ - cx.subscribe(&lsp_store, { - let step_progress = step_progress.clone(); - move |_, event, _| { - if let project::LspStoreEvent::LanguageServerUpdate { - message: - client::proto::update_language_server::Variant::WorkProgress( - client::proto::LspWorkProgress { - message: Some(message), - .. - }, - ), - .. - } = event - { - step_progress.set_substatus(message.clone()); - } - } - }), - cx.subscribe(project, { - let step_progress = step_progress.clone(); - let lsp_store = lsp_store.clone(); - move |_, event, cx| match event { - project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { - let lsp_store = lsp_store.read(cx); - let name = lsp_store - .language_server_adapter_for_id(*language_server_id) - .unwrap() - .name(); - step_progress.set_substatus(format!("LSP idle: {}", name)); - tx.try_send(*language_server_id).ok(); - } - _ => {} - } - }), - ]; - + // Save the buffer so the server sees the current content and kicks off diagnostics. 
project .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) .await?; - let mut pending_language_server_ids = lsp_store.read_with(cx, |lsp_store, _| { - language_server_ids - .iter() - .copied() - .filter(|id| { - lsp_store - .language_server_statuses - .get(id) - .is_some_and(|status| status.has_pending_diagnostic_updates) - }) - .collect::>() - }); - while !pending_language_server_ids.is_empty() { + // Phase 2: wait for all servers to finish their diagnostic pass. + while !servers_pending_diagnostics.is_empty() { futures::select! { - language_server_id = rx.next() => { - if let Some(id) = language_server_id { - pending_language_server_ids.remove(&id); + id = diag_rx.next() => { + if let Some(id) = id { + servers_pending_diagnostics.remove(&id); } }, _ = timeout.clone().fuse() => { diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index cc558939e9aecf826afce77d6205b0ff49ab87bc..cb955dbdf7dd2375395e8c0ecd52df849e33fb38 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -655,6 +655,7 @@ mod tests { use super::*; use edit_prediction::udiff::apply_diff_to_string; use indoc::indoc; + use zeta_prompt::ExcerptRanges; fn make_test_prompt_inputs( content: &str, @@ -664,15 +665,23 @@ mod tests { ZetaPromptInput { cursor_path: Arc::from(Path::new("src/test.rs")), cursor_excerpt: content.into(), - editable_range_in_excerpt: 0..content.len(), cursor_offset_in_excerpt: 0, excerpt_start_row, events, related_files: Vec::new(), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 0..content.len(), + editable_180: 0..content.len(), + editable_350: 0..content.len(), + editable_150_context_350: 0..content.len(), + editable_180_context_350: 0..content.len(), + editable_350_context_150: 0..content.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, 
can_collect_data: false, + repo_url: None, } } diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index 8436dc4a4b26206eb41bafd5b9de8645cb0abb5e..b6f745114f6dd2a091b95b724ee53869a04a8c4e 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -217,7 +217,8 @@ fn compute_cursor_metrics( } } -pub fn print_report(examples: &[Example]) { +pub fn print_report(examples: &[Example], verbose: bool) { + const MAX_EXAMPLES_DEFAULT: usize = 20; use crate::metrics::ClassificationMetrics; const LINE_WIDTH: usize = 101; @@ -250,6 +251,9 @@ pub fn print_report(examples: &[Example]) { let mut patch_deleted_tokens: Vec = Vec::new(); let mut predictions_with_patch: usize = 0; + let mut printed_lines: usize = 0; + let mut skipped_lines: usize = 0; + for example in examples { for (score_idx, score) in example.score.iter().enumerate() { let exact_lines = ClassificationMetrics { @@ -284,18 +288,23 @@ pub fn print_report(examples: &[Example]) { (None, _) => "-".to_string(), }; - println!( - "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}", - truncate_name(&example.spec.name, 40), - score.delta_chr_f, - score.braces_disbalance, - exact_lines.f1() * 100.0, - score.reversal_ratio * 100.0, - qa_reverts_str, - qa_conf_str, - cursor_str, - wrong_er_str - ); + if verbose || printed_lines < MAX_EXAMPLES_DEFAULT { + println!( + "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}", + truncate_name(&example.spec.name, 40), + score.delta_chr_f, + score.braces_disbalance, + exact_lines.f1() * 100.0, + score.reversal_ratio * 100.0, + qa_reverts_str, + qa_conf_str, + cursor_str, + wrong_er_str + ); + printed_lines += 1; + } else { + skipped_lines += 1; + } all_delta_chr_f_scores.push(score.delta_chr_f); all_reversal_ratios.push(score.reversal_ratio); @@ -358,6 +367,13 @@ pub fn print_report(examples: &[Example]) { } } + if skipped_lines > 0 { + println!( + "{:<40} (use --verbose to see 
all {} examples)", + format!("... and {} more", skipped_lines), + printed_lines + skipped_lines + ); + } println!("{}", separator); if !all_delta_chr_f_scores.is_empty() { diff --git a/crates/edit_prediction_cli/src/split_dataset.rs b/crates/edit_prediction_cli/src/split_dataset.rs index b34d7c14c6646442359459ef8d4450dae0b9c40e..f1e0a672695cb940f3c368f71fec3b16a64524a1 100644 --- a/crates/edit_prediction_cli/src/split_dataset.rs +++ b/crates/edit_prediction_cli/src/split_dataset.rs @@ -1,29 +1,34 @@ //! `ep split` implementation. //! //! This command splits a JSONL dataset into multiple files based on size specifications, -//! with stratification by repository URL (if the field is present). +//! with optional stratification by a JSON field. //! //! # Usage //! //! ```text -//! ep split [input.jsonl] = = ... +//! ep split [--stratify=] [input.jsonl] = = ... //! ``` //! //! If `input.jsonl` is not provided or is `-`, reads from stdin. //! //! # Size specifications //! -//! - `80%` - percentage of total (repositories if stratified, examples otherwise) -//! - `100` - absolute count of repositories (if stratified) or examples +//! - `80%` - percentage of total examples (lines) +//! - `100` - approximate absolute count of examples (lines) //! - `rest` - all remaining items (only one split can use this) //! //! # Stratification //! -//! When examples have a `repository_url` field, the split is stratified by repository. -//! This ensures each output file contains examples from non-overlapping repositories. -//! Size specifications apply to the number of repositories, not individual examples. +//! The `--stratify` flag controls how examples are grouped before splitting: //! -//! Examples without `repository_url` are distributed proportionally across all outputs. +//! - `cursor-path` (default): group by the `cursor_path` JSON field +//! - `repo`: group by the `repository_url` JSON field +//! - `none`: no grouping, split individual examples +//! +//! 
When stratifying, the split ensures each output file contains examples from +//! non-overlapping groups. Size specifications always apply to the number of +//! examples (lines), with whole groups assigned greedily to meet the target. +//! Examples missing the stratification field are treated as individual groups. use anyhow::{Context as _, Result, bail}; use clap::Args; @@ -38,23 +43,27 @@ use std::path::{Path, PathBuf}; /// `ep split` CLI args. #[derive(Debug, Args, Clone)] #[command( - about = "Split a JSONL dataset into multiple files (stratified by repository_url if present)", + about = "Split a JSONL dataset into multiple files with optional stratification", after_help = r#"SIZE SPECIFICATIONS: % Percentage of total (e.g., 80%) Absolute number (e.g., 100) rest All remaining items (only one output can use this) - When stratifying by repository_url, sizes apply to repositories, not examples. + Sizes always apply to examples (lines). When stratifying, whole groups + are assigned greedily to approximate the target count. 
EXAMPLES: - # Split 80% train, 20% validation + # Split 80% train, 20% validation (default: stratify by cursor_path) ep split input.jsonl train.jsonl=80% valid.jsonl=rest # Split into train/valid/test ep split input.jsonl train.jsonl=80% valid.jsonl=10% test.jsonl=rest - # Use absolute counts (100 repos to train, rest to valid) - ep split input.jsonl train.jsonl=100 valid.jsonl=rest + # Stratify by repository_url instead of cursor_path + ep split --stratify=repo input.jsonl train.jsonl=80% valid.jsonl=rest + + # No stratification (split by individual examples) + ep split --stratify=none input.jsonl train.jsonl=80% valid.jsonl=rest # Read from stdin cat input.jsonl | ep split train.jsonl=80% valid.jsonl=rest @@ -62,14 +71,15 @@ EXAMPLES: # Reproducible split with seed ep split --seed 42 input.jsonl train.jsonl=80% valid.jsonl=rest - # Disable stratification (split by examples, not repositories) - ep split --no-stratify input.jsonl train.jsonl=80% valid.jsonl=rest - STRATIFICATION: - When examples have a "repository_url" field, the split ensures each output - file contains examples from non-overlapping repositories. This prevents - data leakage between train/test splits. Use --no-stratify to disable this - behavior and split by individual examples instead. + Controls how examples are grouped before splitting: + cursor-path Group by "cursor_path" field (default) + repo Group by "repository_url" field + none No grouping, split individual examples + + When stratifying, the split ensures each output file contains examples + from non-overlapping groups. This prevents data leakage between + train/test splits. 
"# )] pub struct SplitArgs { @@ -77,9 +87,19 @@ pub struct SplitArgs { #[arg(long)] pub seed: Option, - /// Disable stratification by repository_url (split by examples instead) - #[arg(long)] - pub no_stratify: bool, + /// Stratification field for splitting the dataset + #[arg(long, default_value = "cursor-path")] + pub stratify: Stratify, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum, strum::Display)] +pub enum Stratify { + #[strum(serialize = "cursor_path")] + CursorPath, + #[strum(serialize = "repo")] + Repo, + #[strum(serialize = "none")] + None, } #[derive(Debug, Clone)] @@ -142,29 +162,6 @@ fn read_lines_from_input(input: Option<&Path>) -> Result> { Ok(lines) } -fn get_repository_url(line: &str) -> Option { - let value: Value = serde_json::from_str(line).ok()?; - value - .get("repository_url") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()) -} - -fn group_lines_by_repo(lines: Vec) -> (HashMap>, Vec) { - let mut by_repo: HashMap> = HashMap::new(); - let mut without_repo: Vec = Vec::new(); - - for line in lines { - if let Some(repo_url) = get_repository_url(&line) { - by_repo.entry(repo_url).or_default().push(line); - } else { - without_repo.push(line); - } - } - - (by_repo, without_repo) -} - fn compute_split_counts(specs: &[SplitSpec], total: usize) -> Result> { let mut counts = vec![0usize; specs.len()]; let mut remaining = total; @@ -261,26 +258,20 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { return Ok(()); } - let (by_repo, without_repo) = group_lines_by_repo(lines); - let has_repos = !by_repo.is_empty() && !args.no_stratify; + let mut grouped_lines = group_lines(&lines, args.stratify); - if args.no_stratify && !by_repo.is_empty() { + if args.stratify != Stratify::None { eprintln!( - "Stratification disabled (--no-stratify), splitting {} examples by line", + "Stratifying by {} ({} unique groups, {} examples)", + args.stratify, + grouped_lines.len(), total_lines ); - } else if has_repos { + } else { 
eprintln!( - "Stratifying by repository_url ({} unique repositories, {} examples)", - by_repo.len(), - total_lines - without_repo.len() + "No stratification, splitting {} examples by line", + total_lines ); - if !without_repo.is_empty() { - eprintln!( - " + {} examples without repository_url (distributed proportionally)", - without_repo.len() - ); - } } let mut rng = match args.seed { @@ -288,53 +279,31 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { None => rand::rngs::StdRng::from_os_rng(), }; - let mut split_outputs: Vec> = vec![Vec::new(); specs.len()]; - - if has_repos { - let mut repos: Vec = by_repo.keys().cloned().collect(); - repos.shuffle(&mut rng); + grouped_lines.shuffle(&mut rng); - let repo_counts = compute_split_counts(&specs, repos.len())?; + let line_targets = compute_split_counts(&specs, total_lines)?; + let rest_index = specs.iter().position(|s| matches!(s.size, SplitSize::Rest)); + let mut split_outputs: Vec> = vec![Vec::new(); specs.len()]; + let mut group_iter = grouped_lines.into_iter(); - let mut repo_iter = repos.into_iter(); - for (split_idx, &count) in repo_counts.iter().enumerate() { - for _ in 0..count { - if let Some(repo) = repo_iter.next() { - if let Some(repo_lines) = by_repo.get(&repo) { - split_outputs[split_idx].extend(repo_lines.iter().cloned()); - } - } - } + for (split_idx, &target) in line_targets.iter().enumerate() { + if Some(split_idx) == rest_index { + continue; } - - if !without_repo.is_empty() { - let no_repo_counts = compute_split_counts(&specs, without_repo.len())?; - let mut no_repo_shuffled = without_repo; - no_repo_shuffled.shuffle(&mut rng); - - let mut line_iter = no_repo_shuffled.into_iter(); - for (split_idx, &count) in no_repo_counts.iter().enumerate() { - for _ in 0..count { - if let Some(line) = line_iter.next() { - split_outputs[split_idx].push(line); - } - } + let mut accumulated = 0; + while accumulated < target { + if let Some(group) = group_iter.next() { + accumulated += 
group.len(); + split_outputs[split_idx].extend(group); + } else { + break; } } - } else { - let line_counts = compute_split_counts(&specs, total_lines)?; - let mut all_lines: Vec = by_repo.into_values().flatten().collect(); - all_lines.extend(without_repo); - all_lines.shuffle(&mut rng); - - let mut line_iter = all_lines.into_iter(); + } - for (split_idx, &count) in line_counts.iter().enumerate() { - for _ in 0..count { - if let Some(line) = line_iter.next() { - split_outputs[split_idx].push(line); - } - } + if let Some(idx) = rest_index { + for group in group_iter { + split_outputs[idx].extend(group); } } @@ -346,6 +315,39 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { Ok(()) } +/// Groups lines by the specified stratification field. +/// +/// When `stratify` is `None`, each line becomes its own group. +/// When a line is missing the stratification field, it is also placed in its own group. +fn group_lines(lines: &[String], stratify: Stratify) -> Vec> { + if stratify == Stratify::None { + return lines.iter().map(|line| vec![line.clone()]).collect(); + } + + let field = match stratify { + Stratify::Repo => "repository_url", + Stratify::CursorPath => "cursor_path", + Stratify::None => unreachable!(), + }; + + let mut groups: HashMap> = HashMap::new(); + let mut ungrouped: Vec> = Vec::new(); + + for line in lines { + let key = serde_json::from_str::(line) + .ok() + .and_then(|v| v.get(field)?.as_str().map(|s| s.to_string())); + match key { + Some(key) => groups.entry(key).or_default().push(line.clone()), + None => ungrouped.push(vec![line.clone()]), + } + } + + let mut result: Vec> = groups.into_values().collect(); + result.extend(ungrouped); + result +} + #[cfg(test)] mod tests { use super::*; @@ -389,15 +391,11 @@ mod tests { } #[test] - fn test_get_repository_url() { - let line = r#"{"repository_url": "https://github.com/example/repo", "data": 123}"#; - assert_eq!( - get_repository_url(line), - 
Some("https://github.com/example/repo".to_string()) - ); - - let line_no_repo = r#"{"data": 123}"#; - assert_eq!(get_repository_url(line_no_repo), None); + fn test_group_lines_none() { + let lines = vec!["a".to_string(), "b".to_string(), "c".to_string()]; + let groups = group_lines(&lines, Stratify::None); + assert_eq!(groups.len(), 3); + assert!(groups.iter().all(|g| g.len() == 1)); } #[test] @@ -457,12 +455,30 @@ mod tests { r#"{"id": 4}"#.to_string(), ]; - let (by_repo, without_repo) = group_lines_by_repo(lines); + let groups = group_lines(&lines, Stratify::Repo); + + let grouped_count: usize = groups.iter().filter(|g| g.len() > 1).count(); + let ungrouped_count: usize = groups.iter().filter(|g| g.len() == 1).count(); + let total_lines: usize = groups.iter().map(|g| g.len()).sum(); - assert_eq!(by_repo.len(), 2); - assert_eq!(by_repo.get("repo1").unwrap().len(), 2); - assert_eq!(by_repo.get("repo2").unwrap().len(), 1); - assert_eq!(without_repo.len(), 1); + assert_eq!(grouped_count, 1); // repo1 has 2 lines + assert_eq!(ungrouped_count, 2); // repo2 (1 line) + line without repo + assert_eq!(total_lines, 4); + } + + #[test] + fn test_group_lines_by_cursor_path() { + let lines = vec![ + r#"{"cursor_path": "src/main.rs", "id": 1}"#.to_string(), + r#"{"cursor_path": "src/main.rs", "id": 2}"#.to_string(), + r#"{"cursor_path": "src/lib.rs", "id": 3}"#.to_string(), + ]; + + let groups = group_lines(&lines, Stratify::CursorPath); + + let total_lines: usize = groups.iter().map(|g| g.len()).sum(); + assert_eq!(groups.len(), 2); + assert_eq!(total_lines, 3); } #[test] @@ -484,7 +500,7 @@ mod tests { let args = SplitArgs { seed: Some(42), - no_stratify: false, + stratify: Stratify::Repo, }; let inputs = vec![ input.path().to_path_buf(), @@ -502,14 +518,18 @@ mod tests { assert_eq!(train_lines.len() + valid_lines.len(), 8); - let train_repos: std::collections::HashSet<_> = train_lines - .iter() - .filter_map(|l| get_repository_url(l)) - .collect(); - let valid_repos: 
std::collections::HashSet<_> = valid_lines - .iter() - .filter_map(|l| get_repository_url(l)) - .collect(); + let get_repo = |line: &str| -> Option { + let value: Value = serde_json::from_str(line).ok()?; + value + .get("repository_url") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + }; + + let train_repos: std::collections::HashSet<_> = + train_lines.iter().filter_map(|l| get_repo(l)).collect(); + let valid_repos: std::collections::HashSet<_> = + valid_lines.iter().filter_map(|l| get_repo(l)).collect(); assert!( train_repos.is_disjoint(&valid_repos), @@ -531,4 +551,54 @@ mod tests { ]; assert!(compute_split_counts(&specs, 100).is_err()); } + + #[test] + fn test_absolute_targets_lines_not_groups() { + // 5 repos × 3 lines each = 15 total lines. + // `train=6` should target ~6 lines (2 groups), NOT 6 groups (all 15 lines). + let input = create_temp_jsonl(&[ + r#"{"repository_url": "r1", "id": 1}"#, + r#"{"repository_url": "r1", "id": 2}"#, + r#"{"repository_url": "r1", "id": 3}"#, + r#"{"repository_url": "r2", "id": 4}"#, + r#"{"repository_url": "r2", "id": 5}"#, + r#"{"repository_url": "r2", "id": 6}"#, + r#"{"repository_url": "r3", "id": 7}"#, + r#"{"repository_url": "r3", "id": 8}"#, + r#"{"repository_url": "r3", "id": 9}"#, + r#"{"repository_url": "r4", "id": 10}"#, + r#"{"repository_url": "r4", "id": 11}"#, + r#"{"repository_url": "r4", "id": 12}"#, + r#"{"repository_url": "r5", "id": 13}"#, + r#"{"repository_url": "r5", "id": 14}"#, + r#"{"repository_url": "r5", "id": 15}"#, + ]); + + let temp_dir = tempfile::tempdir().unwrap(); + let train_path = temp_dir.path().join("train.jsonl"); + let valid_path = temp_dir.path().join("valid.jsonl"); + + let args = SplitArgs { + seed: Some(42), + stratify: Stratify::Repo, + }; + let inputs = vec![ + input.path().to_path_buf(), + PathBuf::from(format!("{}=6", train_path.display())), + PathBuf::from(format!("{}=rest", valid_path.display())), + ]; + + run_split(&args, &inputs).unwrap(); + + let train_content = 
std::fs::read_to_string(&train_path).unwrap(); + let valid_content = std::fs::read_to_string(&valid_path).unwrap(); + + let train_lines: Vec<&str> = train_content.lines().collect(); + let valid_lines: Vec<&str> = valid_content.lines().collect(); + + // With 3-line groups, train should get 2 groups (6 lines) to meet the + // target of 6, NOT 6 groups (which don't even exist). Valid gets the rest. + assert_eq!(train_lines.len(), 6); + assert_eq!(valid_lines.len(), 9); + } } diff --git a/crates/edit_prediction_cli/src/synthesize.rs b/crates/edit_prediction_cli/src/synthesize.rs index 3977804a8fc686e547d5b518bc64bd836a1afc7f..228690ae49eb1bdcedd8b8f7e0804c65a62213f5 100644 --- a/crates/edit_prediction_cli/src/synthesize.rs +++ b/crates/edit_prediction_cli/src/synthesize.rs @@ -284,7 +284,7 @@ fn should_skip_commit(commit: &CommitInfo) -> bool { .lines() .filter(|l| l.starts_with('+') || l.starts_with('-')) .count(); - lines_changed < 10 + lines_changed < 30 || lines_changed > 1000 || is_non_code_commit(commit) || is_rename_commit(commit) @@ -377,10 +377,13 @@ fn build_prompt(repo_url: &str, commit: &CommitInfo) -> String { indoc! {r#" You are analyzing a git commit to construct a realistic edit prediction example. - Your goal is to tell the story of a programmer's editing session: what sequence of changes did they make, and what change logically comes next? We use these examples to train a model to predict edits, so the quality of the EDIT HISTORY is what matters most. + Your goal is to tell the story of a programmer's editing session: what sequence + of changes did they make, and what change logically comes next? We use these examples + to train a model to predict edits, so the quality of the EDIT HISTORY is what matters most. An edit prediction example consists of: - 1. **Edit History**: 3-6 hunks showing what the programmer did BEFORE making the expected patch. 
This is the most important part - it must tell a coherent story of the changes leading up to the prediction. + 1. **Edit History**: 2-6 hunks showing what the programmer did BEFORE making the expected patch. + This is the most important part - it must tell a coherent story of the changes leading up to the prediction. 2. **Expected Patch**: One small hunk that logically follows from the edit history. Both single-file and multi-file patterns are acceptable. @@ -417,7 +420,7 @@ fn build_prompt(repo_url: &str, commit: &CommitInfo) -> String { First, THINK through whether this commit can support a good example: 1. What is the high-level pattern in this commit? - 2. Can you identify at least 4 related hunks (3 for edit history + 1 for expected patch)? + 2. Can you identify at least 3 related hunks (2 or more for edit history + 1 for expected patch)? 3. What would be the narrative? (First... then... then... finally predict...) 4. Which specific hunk should be the expected patch (the "punchline")? 
diff --git a/crates/edit_prediction_context/src/assemble_excerpts.rs b/crates/edit_prediction_context/src/assemble_excerpts.rs index 3366b1fdd0fed167368157175f0f88e579e310d3..97b83653f53e693218189a938b02b0411fa78a33 100644 --- a/crates/edit_prediction_context/src/assemble_excerpts.rs +++ b/crates/edit_prediction_context/src/assemble_excerpts.rs @@ -8,16 +8,18 @@ const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 24; pub fn assemble_excerpt_ranges( buffer: &BufferSnapshot, - mut input_ranges: Vec>, -) -> Vec> { + input_ranges: Vec<(Range, usize)>, +) -> Vec<(Range, usize)> { + let mut input_ranges: Vec<(Range, usize)> = input_ranges + .into_iter() + .map(|(range, order)| (clip_range_to_lines(&range, false, buffer), order)) + .collect(); merge_ranges(&mut input_ranges); - let mut outline_ranges = Vec::new(); + let mut outline_ranges: Vec<(Range, usize)> = Vec::new(); let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None); let mut outline_ix = 0; - for input_range in &mut input_ranges { - *input_range = clip_range_to_lines(input_range, false, buffer); - + for (input_range, input_order) in &mut input_ranges { while let Some(outline_item) = outline_items.get(outline_ix) { let item_range = clip_range_to_lines(&outline_item.range, false, buffer); @@ -36,6 +38,7 @@ pub fn assemble_excerpt_ranges( add_outline_item( item_range.clone(), body_range.clone(), + *input_order, buffer, &mut outline_ranges, ); @@ -57,6 +60,7 @@ pub fn assemble_excerpt_ranges( next_outline_item .body_range(buffer) .map(|body| clip_range_to_lines(&body, true, buffer)), + *input_order, buffer, &mut outline_ranges, ); @@ -70,12 +74,12 @@ pub fn assemble_excerpt_ranges( } } - input_ranges.extend_from_slice(&outline_ranges); + input_ranges.extend(outline_ranges); merge_ranges(&mut input_ranges); input_ranges .into_iter() - .map(|range| range.start.row..range.end.row) + .map(|(range, order)| (range.start.row..range.end.row, order)) .collect() } @@ -102,8 +106,9 @@ fn 
clip_range_to_lines( fn add_outline_item( mut item_range: Range, body_range: Option>, + order: usize, buffer: &BufferSnapshot, - outline_ranges: &mut Vec>, + outline_ranges: &mut Vec<(Range, usize)>, ) { if let Some(mut body_range) = body_range { if body_range.start.column > 0 { @@ -113,38 +118,39 @@ fn add_outline_item( let head_range = item_range.start..body_range.start; if head_range.start < head_range.end { - outline_ranges.push(head_range); + outline_ranges.push((head_range, order)); } let tail_range = body_range.end..item_range.end; if tail_range.start < tail_range.end { - outline_ranges.push(tail_range); + outline_ranges.push((tail_range, order)); } } else { item_range.start.column = 0; item_range.end.column = buffer.line_len(item_range.end.row); - outline_ranges.push(item_range); + outline_ranges.push((item_range, order)); } } -pub fn merge_ranges(ranges: &mut Vec>) { - ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end))); +pub fn merge_ranges(ranges: &mut Vec<(Range, usize)>) { + ranges.sort_unstable_by(|(a, _), (b, _)| a.start.cmp(&b.start).then(b.end.cmp(&a.end))); let mut index = 1; while index < ranges.len() { - let mut prev_range_end = ranges[index - 1].end; + let mut prev_range_end = ranges[index - 1].0.end; if prev_range_end.column > 0 { prev_range_end += Point::new(1, 0); } if (prev_range_end + Point::new(1, 0)) - .cmp(&ranges[index].start) + .cmp(&ranges[index].0.start) .is_ge() { let removed = ranges.remove(index); - if removed.end.cmp(&ranges[index - 1].end).is_gt() { - ranges[index - 1].end = removed.end; + if removed.0.end.cmp(&ranges[index - 1].0.end).is_gt() { + ranges[index - 1].0.end = removed.0.end; } + ranges[index - 1].1 = ranges[index - 1].1.min(removed.1); } else { index += 1; } diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 5805e93330504fef1ce70e899d413faf9e89aed2..a44ff8b2e3e873c23c2eaa914298a4d50aee3bdc 100644 
--- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -39,6 +39,7 @@ struct RelatedBuffer { buffer: Entity, path: Arc, anchor_ranges: Vec>, + excerpt_orders: Vec, cached_file: Option, } @@ -174,21 +175,21 @@ impl RelatedExcerptStore { }; let buffer = project.get_open_buffer(&project_path, cx)?; let snapshot = buffer.read(cx).snapshot(); - let anchor_ranges = file - .excerpts - .iter() - .map(|excerpt| { - let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0)); - let end_row = excerpt.row_range.end; - let end_col = snapshot.line_len(end_row); - let end = snapshot.anchor_after(Point::new(end_row, end_col)); - start..end - }) - .collect(); + let mut anchor_ranges = Vec::with_capacity(file.excerpts.len()); + let mut excerpt_orders = Vec::with_capacity(file.excerpts.len()); + for excerpt in &file.excerpts { + let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0)); + let end_row = excerpt.row_range.end; + let end_col = snapshot.line_len(end_row); + let end = snapshot.anchor_after(Point::new(end_row, end_col)); + anchor_ranges.push(start..end); + excerpt_orders.push(excerpt.order); + } Some(RelatedBuffer { buffer, path: file.path.clone(), anchor_ranges, + excerpt_orders, cached_file: None, }) }) @@ -221,18 +222,55 @@ impl RelatedExcerptStore { cx.emit(RelatedExcerptStoreEvent::StartedRefresh); })?; - let identifiers = cx + let identifiers_with_ranks = cx .background_spawn(async move { - identifiers_for_position(&snapshot, position, identifier_line_count) + let cursor_offset = position.to_offset(&snapshot); + let identifiers = + identifiers_for_position(&snapshot, position, identifier_line_count); + + // Compute byte distance from cursor to each identifier, then sort by + // distance so we can assign ordinal ranks. Identifiers at the same + // distance share the same rank. 
+ let mut identifiers_with_distance: Vec<(Identifier, usize)> = identifiers + .into_iter() + .map(|id| { + let start = id.range.start.to_offset(&snapshot); + let end = id.range.end.to_offset(&snapshot); + let distance = if cursor_offset < start { + start - cursor_offset + } else if cursor_offset > end { + cursor_offset - end + } else { + 0 + }; + (id, distance) + }) + .collect(); + identifiers_with_distance.sort_by_key(|(_, distance)| *distance); + + let mut cursor_distances: HashMap = HashMap::default(); + let mut current_rank = 0; + let mut previous_distance = None; + for (identifier, distance) in &identifiers_with_distance { + if previous_distance != Some(*distance) { + current_rank = cursor_distances.len(); + previous_distance = Some(*distance); + } + cursor_distances.insert(identifier.clone(), current_rank); + } + + (identifiers_with_distance, cursor_distances) }) .await; + let (identifiers_with_distance, cursor_distances) = identifiers_with_ranks; + let async_cx = cx.clone(); let start_time = Instant::now(); let futures = this.update(cx, |this, cx| { - identifiers + identifiers_with_distance .into_iter() - .filter_map(|identifier| { + .filter_map(|(identifier, _)| { let task = if let Some(entry) = this.cache.get(&identifier) { DefinitionTask::CacheHit(entry.clone()) } else { @@ -334,7 +372,8 @@ impl RelatedExcerptStore { } mean_definition_latency /= cache_miss_count.max(1) as u32; - let (new_cache, related_buffers) = rebuild_related_files(&project, new_cache, cx).await?; + let (new_cache, related_buffers) = + rebuild_related_files(&project, new_cache, &cursor_distances, cx).await?; if let Some(file) = &file { log::debug!( @@ -362,6 +401,7 @@ impl RelatedExcerptStore { async fn rebuild_related_files( project: &Entity, mut new_entries: HashMap>, + cursor_distances: &HashMap, cx: &mut AsyncApp, ) -> Result<(HashMap>, Vec)> { let mut snapshots = HashMap::default(); @@ -396,12 +436,18 @@ async fn rebuild_related_files( } } + let cursor_distances = 
cursor_distances.clone(); Ok(cx .background_spawn(async move { let mut ranges_by_buffer = - HashMap::, Vec>)>::default(); + HashMap::, Vec<(Range, usize)>)>::default(); let mut paths_by_buffer = HashMap::default(); - for entry in new_entries.values_mut() { + let mut min_rank_by_buffer = HashMap::::default(); + for (identifier, entry) in new_entries.iter_mut() { + let rank = cursor_distances + .get(identifier) + .copied() + .unwrap_or(usize::MAX); for definition in entry .definitions .iter() @@ -412,11 +458,16 @@ async fn rebuild_related_files( }; paths_by_buffer.insert(definition.buffer.entity_id(), definition.path.clone()); + let buffer_rank = min_rank_by_buffer + .entry(definition.buffer.entity_id()) + .or_insert(usize::MAX); + *buffer_rank = (*buffer_rank).min(rank); + ranges_by_buffer .entry(definition.buffer.entity_id()) .or_insert_with(|| (definition.buffer.clone(), Vec::new())) .1 - .push(definition.anchor_range.to_point(snapshot)); + .push((definition.anchor_range.to_point(snapshot), rank)); } } @@ -425,7 +476,7 @@ async fn rebuild_related_files( .filter_map(|(entity_id, (buffer, ranges))| { let snapshot = snapshots.get(&entity_id)?; let project_path = paths_by_buffer.get(&entity_id)?; - let row_ranges = assemble_excerpt_ranges(snapshot, ranges); + let assembled = assemble_excerpt_ranges(snapshot, ranges); let root_name = worktree_root_names.get(&project_path.worktree_id)?; let path: Arc = Path::new(&format!( @@ -435,20 +486,21 @@ async fn rebuild_related_files( )) .into(); - let anchor_ranges = row_ranges - .into_iter() - .map(|row_range| { - let start = snapshot.anchor_before(Point::new(row_range.start, 0)); - let end_col = snapshot.line_len(row_range.end); - let end = snapshot.anchor_after(Point::new(row_range.end, end_col)); - start..end - }) - .collect(); + let mut anchor_ranges = Vec::with_capacity(assembled.len()); + let mut excerpt_orders = Vec::with_capacity(assembled.len()); + for (row_range, order) in assembled { + let start = 
snapshot.anchor_before(Point::new(row_range.start, 0)); + let end_col = snapshot.line_len(row_range.end); + let end = snapshot.anchor_after(Point::new(row_range.end, end_col)); + anchor_ranges.push(start..end); + excerpt_orders.push(order); + } let mut related_buffer = RelatedBuffer { buffer, path, anchor_ranges, + excerpt_orders, cached_file: None, }; related_buffer.fill_cache(snapshot); @@ -456,7 +508,17 @@ async fn rebuild_related_files( }) .collect(); - related_buffers.sort_by_key(|related| related.path.clone()); + related_buffers.sort_by(|a, b| { + let rank_a = min_rank_by_buffer + .get(&a.buffer.entity_id()) + .copied() + .unwrap_or(usize::MAX); + let rank_b = min_rank_by_buffer + .get(&b.buffer.entity_id()) + .copied() + .unwrap_or(usize::MAX); + rank_a.cmp(&rank_b).then_with(|| a.path.cmp(&b.path)) + }); (new_entries, related_buffers) }) @@ -487,12 +549,14 @@ impl RelatedBuffer { let excerpts = self .anchor_ranges .iter() - .map(|range| { + .zip(self.excerpt_orders.iter()) + .map(|(range, &order)| { let start = range.start.to_point(buffer); let end = range.end.to_point(buffer); RelatedExcerpt { row_range: start.row..end.row, text: buffer.text_for_range(start..end).collect::().into(), + order, } }) .collect::>(); @@ -580,14 +644,12 @@ fn identifiers_for_position( let outer_range = ranges.first().map_or(0, |r| r.start)..ranges.last().map_or(buffer.len(), |r| r.end); - let mut captures = buffer - .syntax - .captures(outer_range.clone(), &buffer.text, |grammar| { - grammar - .highlights_config - .as_ref() - .map(|config| &config.query) - }); + let mut captures = buffer.captures(outer_range.clone(), |grammar| { + grammar + .highlights_config + .as_ref() + .map(|config| &config.query) + }); for range in ranges { captures.set_byte_range(range.start..outer_range.end); diff --git a/crates/edit_prediction_context/src/edit_prediction_context_tests.rs b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs index 
b619fa729449f2e232a8c8231f416f5a15c5271f..01c4c76e82eb0851b7552b3d9117af1212a8b3da 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context_tests.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs @@ -48,6 +48,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ + ( + "root/src/person.rs", + &[ + indoc! {" + pub struct Person { + first_name: String, + last_name: String, + email: String, + age: u32, + } + + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + }"}, + "}", + ], + ), ( "root/src/company.rs", &[indoc! {" @@ -71,24 +89,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { }"}, ], ), - ( - "root/src/person.rs", - &[ - indoc! {" - pub struct Person { - first_name: String, - last_name: String, - email: String, - age: u32, - } - - impl Person { - pub fn get_first_name(&self) -> &str { - &self.first_name - }"}, - "}", - ], - ), ], ); }); @@ -112,6 +112,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ + ( + "root/src/person.rs", + &[ + indoc! {" + pub struct Person { + first_name: String, + last_name: String, + email: String, + age: u32, + } + + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + }"}, + "}", + ], + ), ( "root/src/company.rs", &[indoc! {" @@ -136,24 +154,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { }"}, ], ), - ( - "root/src/person.rs", - &[ - indoc! 
{" - pub struct Person { - first_name: String, - last_name: String, - email: String, - age: u32, - } - - impl Person { - pub fn get_first_name(&self) -> &str { - &self.first_name - }"}, - "}", - ], - ), ], ); }); @@ -290,20 +290,21 @@ fn test_assemble_excerpts(cx: &mut TestAppContext) { let (input, ranges) = marked_text_ranges(&input, false); let buffer = cx.new(|cx| Buffer::local(input, cx).with_language(rust_lang(), cx)); buffer.read_with(cx, |buffer, _cx| { - let ranges: Vec> = ranges + let ranges: Vec<(Range, usize)> = ranges .into_iter() - .map(|range| range.to_point(&buffer)) + .map(|range| (range.to_point(&buffer), 0)) .collect(); - let row_ranges = assemble_excerpt_ranges(&buffer.snapshot(), ranges); - let excerpts: Vec = row_ranges + let assembled = assemble_excerpt_ranges(&buffer.snapshot(), ranges); + let excerpts: Vec = assembled .into_iter() - .map(|row_range| { + .map(|(row_range, order)| { let start = Point::new(row_range.start, 0); let end = Point::new(row_range.end, buffer.line_len(row_range.end)); RelatedExcerpt { row_range, text: buffer.text_for_range(start..end).collect::().into(), + order, } }) .collect(); @@ -620,7 +621,6 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ - ("root/src/main.rs", &["fn work() {", "}"]), ( "root/src/types.rs", &[indoc! {" @@ -628,6 +628,194 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) { value: i32, }"}], ), + ("root/src/main.rs", &["fn work() {", "}"]), + ], + ); + }); +} + +#[gpui::test] +async fn test_definitions_ranked_by_cursor_proximity(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // helpers.rs has an impl block whose body exceeds the test + // MAX_OUTLINE_ITEM_BODY_SIZE (24 bytes), so assemble_excerpt_ranges + // splits it into header + individual children + closing brace. main.rs + // references two of the three methods on separate lines at varying + // distances from the cursor. 
This exercises: + // 1. File ordering by closest identifier rank. + // 2. Per-excerpt ordering within a file — child excerpts carry the rank + // of the identifier that discovered them. + // 3. Parent excerpt (impl header / closing brace) inheriting the minimum + // order of its children. + fs.insert_tree( + path!("/root"), + json!({ + "src": { + "helpers.rs": indoc! {r#" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 { + let intermediate = self.value; + intermediate + 1 + } + + pub fn beta(&self) -> i32 { + let intermediate = self.value; + intermediate + 2 + } + + pub fn gamma(&self) -> i32 { + let intermediate = self.value; + intermediate + 3 + } + } + "#}, + "main.rs": indoc! {r#" + use super::helpers::Helpers; + + fn process(h: Helpers) { + let a = h.alpha(); + let b = h.gamma(); + } + "#}, + }, + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let mut servers = setup_fake_lsp(&project, cx); + + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + let _server = servers.next().await.unwrap(); + cx.run_until_parked(); + + // Place cursor on "h.alpha()". `alpha` is at distance 0, `gamma` is + // farther below. Both resolve to methods inside `impl Helpers` in + // helpers.rs. The impl header and closing brace excerpts should inherit + // the min order of their children (alpha's order). 
+ let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(&project, cx)); + related_excerpt_store.update(cx, |store, cx| { + let position = { + let buffer = buffer.read(cx); + let offset = buffer.text().find("h.alpha()").unwrap(); + buffer.anchor_before(offset) + }; + + store.set_identifier_line_count(1); + store.refresh(buffer.clone(), position, cx); + }); + + cx.executor().advance_clock(DEBOUNCE_DURATION); + related_excerpt_store.update(cx, |store, cx| { + let files = store.related_files(cx); + + // helpers.rs has 4 excerpts: the struct+impl header merged with + // the alpha method header (order 1 from alpha), alpha's closing + // brace (order 1), gamma's method header (order 6), and the + // gamma+impl closing brace (order 1, inherited from alpha which + // is also a child of the impl). + let alpha_order = 1; + let gamma_order = 6; + assert_related_files_with_orders( + &files, + &[ + ( + "root/src/helpers.rs", + &[ + ( + indoc! {" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 {"}, + alpha_order, + ), + (" }", alpha_order), + (" pub fn gamma(&self) -> i32 {", gamma_order), + ( + indoc! {" + } + }"}, + alpha_order, + ), + ], + ), + ( + "root/src/main.rs", + &[("fn process(h: Helpers) {", 8), ("}", 8)], + ), + ], + ); + }); + + // Now move cursor to "h.gamma()" — gamma becomes closest, reranking the + // excerpts so that the gamma method excerpt has the best order and the + // alpha method excerpt has a worse order. + related_excerpt_store.update(cx, |store, cx| { + let position = { + let buffer = buffer.read(cx); + let offset = buffer.text().find("h.gamma()").unwrap(); + buffer.anchor_before(offset) + }; + + store.set_identifier_line_count(1); + store.refresh(buffer.clone(), position, cx); + }); + + cx.executor().advance_clock(DEBOUNCE_DURATION); + related_excerpt_store.update(cx, |store, cx| { + let files = store.related_files(cx); + + // Now gamma is closest. 
The alpha method excerpts carry alpha's + // rank (3), and the gamma method excerpts carry gamma's rank (1). + // The impl closing brace merges with gamma's closing brace and + // inherits gamma's order (the best child). + let alpha_order = 3; + let gamma_order = 1; + assert_related_files_with_orders( + &files, + &[ + ( + "root/src/helpers.rs", + &[ + ( + indoc! {" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 {"}, + alpha_order, + ), + (" }", alpha_order), + (" pub fn gamma(&self) -> i32 {", gamma_order), + ( + indoc! {" + } + }"}, + gamma_order, + ), + ], + ), + ( + "root/src/main.rs", + &[("fn process(h: Helpers) {", 8), ("}", 8)], + ), ], ); }); @@ -788,30 +976,56 @@ fn test_project_1() -> serde_json::Value { } fn assert_related_files(actual_files: &[RelatedFile], expected_files: &[(&str, &[&str])]) { - let actual_files = actual_files + let expected_with_orders: Vec<(&str, Vec<(&str, usize)>)> = expected_files + .iter() + .map(|(path, texts)| (*path, texts.iter().map(|text| (*text, 0)).collect())) + .collect(); + let expected_refs: Vec<(&str, &[(&str, usize)])> = expected_with_orders + .iter() + .map(|(path, excerpts)| (*path, excerpts.as_slice())) + .collect(); + assert_related_files_impl(actual_files, &expected_refs, false) +} + +fn assert_related_files_with_orders( + actual_files: &[RelatedFile], + expected_files: &[(&str, &[(&str, usize)])], +) { + assert_related_files_impl(actual_files, expected_files, true) +} + +fn assert_related_files_impl( + actual_files: &[RelatedFile], + expected_files: &[(&str, &[(&str, usize)])], + check_orders: bool, +) { + let actual: Vec<(&str, Vec<(String, usize)>)> = actual_files .iter() .map(|file| { let excerpts = file .excerpts .iter() - .map(|excerpt| excerpt.text.to_string()) - .collect::>(); + .map(|excerpt| { + let order = if check_orders { excerpt.order } else { 0 }; + (excerpt.text.to_string(), order) + }) + .collect(); (file.path.to_str().unwrap(), excerpts) }) - 
.collect::>(); - let expected_excerpts = expected_files + .collect(); + let expected: Vec<(&str, Vec<(String, usize)>)> = expected_files .iter() - .map(|(path, texts)| { + .map(|(path, excerpts)| { ( *path, - texts + excerpts .iter() - .map(|line| line.to_string()) - .collect::>(), + .map(|(text, order)| (text.to_string(), *order)) + .collect(), ) }) - .collect::>(); - pretty_assertions::assert_eq!(actual_files, expected_excerpts) + .collect(); + pretty_assertions::assert_eq!(actual, expected) } fn assert_definitions(definitions: &[LocationLink], first_lines: &[&str], cx: &mut TestAppContext) { diff --git a/crates/edit_prediction_ui/Cargo.toml b/crates/edit_prediction_ui/Cargo.toml index d4a7c5d3ab800f54476a8e88914dcaaba3a26547..05afbabd2045e9bca591b6c2edba846e95953a4f 100644 --- a/crates/edit_prediction_ui/Cargo.toml +++ b/crates/edit_prediction_ui/Cargo.toml @@ -40,7 +40,6 @@ paths.workspace = true project.workspace = true regex.workspace = true settings.workspace = true -supermaven.workspace = true telemetry.workspace = true text.workspace = true theme.workspace = true diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index 729b901be1556f011c101258d34af9b98b45f272..5e9ec08b96442dc8c10e89aa43f891e7743f85ef 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -3,7 +3,7 @@ use client::{Client, UserStore, zed_urls}; use cloud_llm_client::UsageLimit; use codestral::{self, CodestralEditPredictionDelegate}; use copilot::Status; -use edit_prediction::{EditPredictionStore, Zeta2FeatureFlag}; +use edit_prediction::EditPredictionStore; use edit_prediction_types::EditPredictionDelegateHandle; use editor::{ Editor, MultiBufferOffset, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll, @@ -22,15 +22,12 @@ use language::{ }; use project::{DisableAiSettings, Project}; use regex::Regex; -use settings::{ - 
EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, Settings, SettingsStore, update_settings_file, -}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::{ rc::Rc, sync::{Arc, LazyLock}, time::Duration, }; -use supermaven::{AccountStatus, Supermaven}; use ui::{ Clickable, ContextMenu, ContextMenuEntry, DocumentationSide, IconButton, IconButtonShape, Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*, @@ -75,13 +72,6 @@ pub struct EditPredictionButton { project: WeakEntity, } -enum SupermavenButtonStatus { - Ready, - Errored(String), - NeedsActivation(String), - Initializing, -} - impl Render for EditPredictionButton { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { // Return empty div if AI is disabled @@ -188,101 +178,6 @@ impl Render for EditPredictionButton { .with_handle(self.popover_menu_handle.clone()), ) } - EditPredictionProvider::Supermaven => { - let Some(supermaven) = Supermaven::global(cx) else { - return div(); - }; - - let supermaven = supermaven.read(cx); - - let status = match supermaven { - Supermaven::Starting => SupermavenButtonStatus::Initializing, - Supermaven::FailedDownload { error } => { - SupermavenButtonStatus::Errored(error.to_string()) - } - Supermaven::Spawned(agent) => { - let account_status = agent.account_status.clone(); - match account_status { - AccountStatus::NeedsActivation { activate_url } => { - SupermavenButtonStatus::NeedsActivation(activate_url) - } - AccountStatus::Unknown => SupermavenButtonStatus::Initializing, - AccountStatus::Ready => SupermavenButtonStatus::Ready, - } - } - Supermaven::Error { error } => { - SupermavenButtonStatus::Errored(error.to_string()) - } - }; - - let icon = status.to_icon(); - let tooltip_text = status.to_tooltip(); - let has_menu = status.has_menu(); - let this = cx.weak_entity(); - let fs = self.fs.clone(); - let file = self.file.clone(); - let language = self.language.clone(); - let project = self.project.clone(); - - 
div().child( - PopoverMenu::new("supermaven") - .on_open({ - let file = file.clone(); - let language = language; - let project = project; - Rc::new(move |_window, cx| { - emit_edit_prediction_menu_opened( - "supermaven", - &file, - &language, - &project, - cx, - ); - }) - }) - .menu(move |window, cx| match &status { - SupermavenButtonStatus::NeedsActivation(activate_url) => { - Some(ContextMenu::build(window, cx, |menu, _, _| { - let fs = fs.clone(); - let activate_url = activate_url.clone(); - - menu.entry("Sign In", None, move |_, cx| { - cx.open_url(activate_url.as_str()) - }) - .entry( - "Use Zed AI", - None, - move |_, cx| { - set_completion_provider( - fs.clone(), - cx, - EditPredictionProvider::Zed, - ) - }, - ) - })) - } - SupermavenButtonStatus::Ready => this - .update(cx, |this, cx| { - this.build_supermaven_context_menu(window, cx) - }) - .ok(), - _ => None, - }) - .anchor(Corner::BottomRight) - .trigger_with_tooltip( - IconButton::new("supermaven-icon", icon), - move |window, cx| { - if has_menu { - Tooltip::for_action(tooltip_text.clone(), &ToggleMenu, cx) - } else { - Tooltip::text(tooltip_text.clone())(window, cx) - } - }, - ) - .with_handle(self.popover_menu_handle.clone()), - ) - } EditPredictionProvider::Codestral => { let enabled = self.editor_enabled.unwrap_or(true); let has_api_key = codestral::codestral_api_key(cx).is_some(); @@ -642,9 +537,15 @@ impl EditPredictionButton { edit_prediction::ollama::ensure_authenticated(cx); let sweep_api_token_task = edit_prediction::sweep_ai::load_sweep_api_token(cx); let mercury_api_token_task = edit_prediction::mercury::load_mercury_api_token(cx); + let open_ai_compatible_api_token_task = + edit_prediction::open_ai_compatible::load_open_ai_compatible_api_token(cx); cx.spawn(async move |this, cx| { - _ = futures::join!(sweep_api_token_task, mercury_api_token_task); + _ = futures::join!( + sweep_api_token_task, + mercury_api_token_task, + open_ai_compatible_api_token_task + ); this.update(cx, |_, cx| { 
cx.notify(); }) @@ -873,13 +774,7 @@ impl EditPredictionButton { menu = menu.separator().header("Privacy"); - if matches!( - provider, - EditPredictionProvider::Zed - | EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) - ) { + if matches!(provider, EditPredictionProvider::Zed) { if let Some(provider) = &self.edit_prediction_provider { let data_collection = provider.data_collection_state(cx); @@ -1120,21 +1015,6 @@ impl EditPredictionButton { }) } - fn build_supermaven_context_menu( - &self, - window: &mut Window, - cx: &mut Context, - ) -> Entity { - ContextMenu::build(window, cx, |menu, window, cx| { - let menu = self.build_language_settings_menu(menu, window, cx); - let menu = - self.add_provider_switching_section(menu, EditPredictionProvider::Supermaven, cx); - - menu.separator() - .action("Sign Out", supermaven::SignOut.boxed_clone()) - }) - } - fn build_codestral_context_menu( &self, window: &mut Window, @@ -1312,6 +1192,58 @@ impl EditPredictionButton { menu = self.build_language_settings_menu(menu, window, cx); } menu = self.add_provider_switching_section(menu, provider, cx); + + if cx.is_staff() { + if let Some(store) = EditPredictionStore::try_global(cx) { + let store = store.read(cx); + let experiments = store.available_experiments().to_vec(); + let preferred = store.preferred_experiment().map(|s| s.to_owned()); + let active = store.active_experiment().map(|s| s.to_owned()); + + let preferred_for_submenu = preferred.clone(); + menu = menu + .separator() + .submenu("Experiment", move |menu, _window, _cx| { + let mut menu = menu.toggleable_entry( + "Default", + preferred_for_submenu.is_none(), + IconPosition::Start, + None, + { + move |_window, cx| { + if let Some(store) = EditPredictionStore::try_global(cx) { + store.update(cx, |store, _cx| { + store.set_preferred_experiment(None); + }); + } + } + }, + ); + for experiment in &experiments { + let is_selected = active.as_deref() == Some(experiment.as_str()) + || 
preferred.as_deref() == Some(experiment.as_str()); + let experiment_name = experiment.clone(); + menu = menu.toggleable_entry( + experiment.clone(), + is_selected, + IconPosition::Start, + None, + move |_window, cx| { + if let Some(store) = EditPredictionStore::try_global(cx) { + store.update(cx, |store, _cx| { + store.set_preferred_experiment(Some( + experiment_name.clone(), + )); + }); + } + }, + ); + } + menu + }); + } + } + menu = menu.separator().item( ContextMenuEntry::new("Configure Providers") .icon(IconName::Settings) @@ -1384,33 +1316,6 @@ impl StatusItemView for EditPredictionButton { } } -impl SupermavenButtonStatus { - fn to_icon(&self) -> IconName { - match self { - SupermavenButtonStatus::Ready => IconName::Supermaven, - SupermavenButtonStatus::Errored(_) => IconName::SupermavenError, - SupermavenButtonStatus::NeedsActivation(_) => IconName::SupermavenInit, - SupermavenButtonStatus::Initializing => IconName::SupermavenInit, - } - } - - fn to_tooltip(&self) -> String { - match self { - SupermavenButtonStatus::Ready => "Supermaven is ready".to_string(), - SupermavenButtonStatus::Errored(error) => format!("Supermaven error: {}", error), - SupermavenButtonStatus::NeedsActivation(_) => "Supermaven needs activation".to_string(), - SupermavenButtonStatus::Initializing => "Supermaven initializing".to_string(), - } - } - - fn has_menu(&self) -> bool { - match self { - SupermavenButtonStatus::Ready | SupermavenButtonStatus::NeedsActivation(_) => true, - SupermavenButtonStatus::Errored(_) | SupermavenButtonStatus::Initializing => false, - } - } -} - async fn open_disabled_globs_setting_in_editor( workspace: WeakEntity, cx: &mut AsyncWindowContext, @@ -1494,12 +1399,6 @@ pub fn get_available_providers(cx: &mut App) -> Vec { providers.push(EditPredictionProvider::Zed); - if cx.has_flag::() { - providers.push(EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - )); - } - if let Some(app_state) = 
workspace::AppState::global(cx).upgrade() && copilot::GlobalCopilotAuth::try_get_or_init(app_state, cx) .is_some_and(|copilot| copilot.0.read(cx).is_authenticated()) @@ -1507,14 +1406,6 @@ pub fn get_available_providers(cx: &mut App) -> Vec { providers.push(EditPredictionProvider::Copilot); }; - if let Some(supermaven) = Supermaven::global(cx) { - if let Supermaven::Spawned(agent) = supermaven.read(cx) { - if matches!(agent.account_status, AccountStatus::Ready) { - providers.push(EditPredictionProvider::Supermaven); - } - } - } - if codestral::codestral_api_key(cx).is_some() { providers.push(EditPredictionProvider::Codestral); } diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs index 6ad816c36ddea3f0493ce853fd6f0efd4b8e0dc7..48e74dcdcc102f9ed7844f1b8829e0182fe2c97b 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -8,14 +8,17 @@ use std::{ use anyhow::Result; use client::{Client, UserStore}; -use editor::{Editor, PathKey}; +use editor::{ + Editor, PathKey, + display_map::{BlockPlacement, BlockProperties, BlockStyle}, +}; use futures::StreamExt as _; use gpui::{ Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement as _, IntoElement as _, ParentElement as _, SharedString, Styled as _, Task, TextAlign, Window, actions, div, pulsating_between, }; -use multi_buffer::MultiBuffer; +use multi_buffer::{Anchor, MultiBuffer}; use project::Project; use text::Point; use ui::{ @@ -165,8 +168,14 @@ impl EditPredictionContextView { } cx.spawn_in(window, async move |this, cx| { - let mut paths = Vec::new(); + let mut paths: Vec<(PathKey, _, Vec<_>, Vec, usize)> = Vec::new(); for (related_file, buffer) in related_files { + let orders = related_file + .excerpts + .iter() + .map(|excerpt| excerpt.order) + .collect::>(); + let min_order 
= orders.iter().copied().min().unwrap_or(usize::MAX); let point_ranges = related_file .excerpts .iter() @@ -175,20 +184,53 @@ impl EditPredictionContextView { }) .collect::>(); cx.update(|_, cx| { - let path = PathKey::for_buffer(&buffer, cx); - paths.push((path, buffer, point_ranges)); + let path = if let Some(file) = buffer.read(cx).file() { + PathKey::with_sort_prefix(min_order as u64, file.path().clone()) + } else { + PathKey::for_buffer(&buffer, cx) + }; + paths.push((path, buffer, point_ranges, orders, min_order)); })?; } + paths.sort_by_key(|(_, _, _, _, min_order)| *min_order); + + let mut excerpt_anchors_with_orders: Vec<(Anchor, usize)> = Vec::new(); + multibuffer.update(cx, |multibuffer, cx| { multibuffer.clear(cx); - for (path, buffer, ranges) in paths { - multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + for (path, buffer, ranges, orders, _) in paths { + let (anchor_ranges, _) = + multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) { + excerpt_anchors_with_orders.push((anchor_range.start, order)); + } } }); editor.update_in(cx, |editor, window, cx| { + let blocks = excerpt_anchors_with_orders + .into_iter() + .map(|(anchor, order)| { + let label = SharedString::from(format!("order: {order}")); + BlockProperties { + placement: BlockPlacement::Above(anchor), + height: Some(1), + style: BlockStyle::Sticky, + render: Arc::new(move |cx| { + div() + .pl(cx.anchor_x) + .text_ui_xs(cx) + .text_color(cx.editor_style.status.info) + .child(label.clone()) + .into_any_element() + }), + priority: 0, + } + }) + .collect::>(); + editor.insert_blocks(blocks, None, cx); editor.move_to_beginning(&Default::default(), window, cx); })?; diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs index 1a6c030239631536e143000e2eef37fdd0e599c8..0735a8ccab69cfc812b84195adb14743167c651a 100644 --- 
a/crates/edit_prediction_ui/src/edit_prediction_ui.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -3,7 +3,7 @@ mod edit_prediction_context_view; mod rate_prediction_modal; use command_palette_hooks::CommandPaletteFilter; -use edit_prediction::{EditPredictionStore, ResetOnboarding, Zeta2FeatureFlag, capture_example}; +use edit_prediction::{EditPredictionStore, ResetOnboarding, capture_example}; use edit_prediction_context_view::EditPredictionContextView; use editor::Editor; use feature_flags::FeatureFlagAppExt as _; @@ -54,28 +54,25 @@ pub fn init(cx: &mut App) { capture_example_as_markdown(workspace, window, cx); }); workspace.register_action_renderer(|div, _, _, cx| { - let has_flag = cx.has_flag::(); - div.when(has_flag, |div| { - div.on_action(cx.listener( - move |workspace, _: &OpenEditPredictionContextView, window, cx| { - let project = workspace.project(); - workspace.split_item( - SplitDirection::Right, - Box::new(cx.new(|cx| { - EditPredictionContextView::new( - project.clone(), - workspace.client(), - workspace.user_store(), - window, - cx, - ) - })), - window, - cx, - ); - }, - )) - }) + div.on_action(cx.listener( + move |workspace, _: &OpenEditPredictionContextView, window, cx| { + let project = workspace.project(); + workspace.split_item( + SplitDirection::Right, + Box::new(cx.new(|cx| { + EditPredictionContextView::new( + project.clone(), + workspace.client(), + workspace.user_store(), + window, + cx, + ) + })), + window, + cx, + ); + }, + )) }); }) .detach(); diff --git a/crates/edit_prediction_ui/src/rate_prediction_modal.rs b/crates/edit_prediction_ui/src/rate_prediction_modal.rs index 4dba14e2bf449fe4744f31f3f875599f7dc02692..d07dbe9bad72c2252ee2e33c8a014778d1331e96 100644 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs @@ -1,6 +1,6 @@ use buffer_diff::BufferDiff; use edit_prediction::{EditPrediction, EditPredictionRating, EditPredictionStore}; -use 
editor::{Editor, ExcerptRange, Inlay, MultiBuffer}; +use editor::{Editor, Inlay, MultiBuffer}; use feature_flags::FeatureFlag; use gpui::{ App, BorderStyle, DismissEvent, EdgesRefinement, Entity, EventEmitter, FocusHandle, Focusable, @@ -359,16 +359,9 @@ impl RatePredictionsModal { editor.disable_header_for_buffer(new_buffer_id, cx); let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| { multibuffer.clear(cx); - let excerpt_ids = multibuffer.push_excerpts( - new_buffer, - vec![ExcerptRange { - context: start..end, - primary: start..end, - }], - cx, - ); + multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx); multibuffer.add_diff(diff, cx); - excerpt_ids.into_iter().next() + multibuffer.excerpt_ids().into_iter().next() }); if let Some((excerpt_id, cursor_position)) = diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs index c4308075a8823819fc871f6c9a36b9dea56d2172..16fe29a7fa4aa066cf045a63c477fbb569d80334 100644 --- a/crates/editor/src/bracket_colorization.rs +++ b/crates/editor/src/bracket_colorization.rs @@ -5,10 +5,10 @@ use std::ops::Range; use crate::{Editor, HighlightKey}; -use collections::HashMap; -use gpui::{Context, HighlightStyle}; +use collections::{HashMap, HashSet}; +use gpui::{AppContext as _, Context, HighlightStyle}; use itertools::Itertools; -use language::language_settings; +use language::{BufferRow, BufferSnapshot, language_settings}; use multi_buffer::{Anchor, ExcerptId}; use ui::{ActiveTheme, utils::ensure_minimum_contrast}; @@ -19,22 +19,16 @@ impl Editor { } if invalidate { - self.fetched_tree_sitter_chunks.clear(); + self.bracket_fetched_tree_sitter_chunks.clear(); } let accents_count = cx.theme().accents().0.len(); let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let anchors_in_multi_buffer = |current_excerpt: ExcerptId, - text_anchors: [text::Anchor; 4]| - -> Option<[Option<_>; 4]> { - multi_buffer_snapshot - .anchors_in_excerpt(current_excerpt, 
text_anchors)? - .collect_array() - }; - - let bracket_matches_by_accent = self.visible_excerpts(false, cx).into_iter().fold( - HashMap::default(), - |mut acc, (excerpt_id, (buffer, _, buffer_range))| { + + let visible_excerpts = self.visible_excerpts(false, cx); + let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range)> = visible_excerpts + .into_iter() + .filter_map(|(excerpt_id, (buffer, _, buffer_range))| { let buffer_snapshot = buffer.read(cx).snapshot(); if language_settings::language_settings( buffer_snapshot.language().map(|language| language.name()), @@ -43,112 +37,173 @@ impl Editor { ) .colorize_brackets { - let fetched_chunks = self - .fetched_tree_sitter_chunks - .entry(excerpt_id) - .or_default(); - - let brackets_by_accent = buffer_snapshot - .fetch_bracket_ranges( - buffer_range.start..buffer_range.end, - Some(fetched_chunks), - ) - .into_iter() - .flat_map(|(chunk_range, pairs)| { - if fetched_chunks.insert(chunk_range) { - pairs - } else { - Vec::new() - } - }) - .filter_map(|pair| { - let color_index = pair.color_index?; - - let buffer_open_range = - buffer_snapshot.anchor_range_around(pair.open_range); - let buffer_close_range = - buffer_snapshot.anchor_range_around(pair.close_range); - let [ - buffer_open_range_start, - buffer_open_range_end, - buffer_close_range_start, - buffer_close_range_end, - ] = anchors_in_multi_buffer( - excerpt_id, - [ - buffer_open_range.start, - buffer_open_range.end, - buffer_close_range.start, - buffer_close_range.end, - ], - )?; - let multi_buffer_open_range = - buffer_open_range_start.zip(buffer_open_range_end); - let multi_buffer_close_range = - buffer_close_range_start.zip(buffer_close_range_end); - - let mut ranges = Vec::with_capacity(2); - if let Some((open_start, open_end)) = multi_buffer_open_range { - ranges.push(open_start..open_end); - } - if let Some((close_start, close_end)) = multi_buffer_close_range { - ranges.push(close_start..close_end); - } - if ranges.is_empty() { - None - } else { - 
Some((color_index % accents_count, ranges)) - } - }); + Some((excerpt_id, buffer_snapshot, buffer_range)) + } else { + None + } + }) + .collect(); - for (accent_number, new_ranges) in brackets_by_accent { - let ranges = acc - .entry(accent_number) - .or_insert_with(Vec::>::new); + let mut fetched_tree_sitter_chunks = excerpt_data + .iter() + .filter_map(|(excerpt_id, ..)| { + Some(( + *excerpt_id, + self.bracket_fetched_tree_sitter_chunks + .get(excerpt_id) + .cloned()?, + )) + }) + .collect::>>>(); + + let bracket_matches_by_accent = cx.background_spawn(async move { + let anchors_in_multi_buffer = |current_excerpt: ExcerptId, + text_anchors: [text::Anchor; 4]| + -> Option<[Option<_>; 4]> { + multi_buffer_snapshot + .anchors_in_excerpt(current_excerpt, text_anchors)? + .collect_array() + }; - for new_range in new_ranges { - let i = ranges - .binary_search_by(|probe| { - probe.start.cmp(&new_range.start, &multi_buffer_snapshot) - }) - .unwrap_or_else(|i| i); - ranges.insert(i, new_range); + let bracket_matches_by_accent: HashMap>> = + excerpt_data.into_iter().fold( + HashMap::default(), + |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| { + let fetched_chunks = + fetched_tree_sitter_chunks.entry(excerpt_id).or_default(); + + let brackets_by_accent = compute_bracket_ranges( + &buffer_snapshot, + buffer_range, + fetched_chunks, + excerpt_id, + accents_count, + &anchors_in_multi_buffer, + ); + + for (accent_number, new_ranges) in brackets_by_accent { + let ranges = acc + .entry(accent_number) + .or_insert_with(Vec::>::new); + + for new_range in new_ranges { + let i = ranges + .binary_search_by(|probe| { + probe.start.cmp(&new_range.start, &multi_buffer_snapshot) + }) + .unwrap_or_else(|i| i); + ranges.insert(i, new_range); + } } - } - } - acc - }, - ); + acc + }, + ); - if invalidate { - self.clear_highlights_with( - &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)), - cx, - ); - } + (bracket_matches_by_accent, fetched_tree_sitter_chunks) + }); let 
editor_background = cx.theme().colors().editor_background; let accents = cx.theme().accents().clone(); - for (accent_number, bracket_highlights) in bracket_matches_by_accent { - let bracket_color = accents.color_for_index(accent_number as u32); - let adjusted_color = ensure_minimum_contrast(bracket_color, editor_background, 55.0); - let style = HighlightStyle { - color: Some(adjusted_color), - ..HighlightStyle::default() - }; - self.highlight_text_key( - HighlightKey::ColorizeBracket(accent_number), - bracket_highlights, - style, - true, - cx, - ); - } + self.colorize_brackets_task = cx.spawn(async move |editor, cx| { + if invalidate { + editor + .update(cx, |editor, cx| { + editor.clear_highlights_with( + &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)), + cx, + ); + }) + .ok(); + } + + let (bracket_matches_by_accent, updated_chunks) = bracket_matches_by_accent.await; + + editor + .update(cx, |editor, cx| { + editor + .bracket_fetched_tree_sitter_chunks + .extend(updated_chunks); + for (accent_number, bracket_highlights) in bracket_matches_by_accent { + let bracket_color = accents.color_for_index(accent_number as u32); + let adjusted_color = + ensure_minimum_contrast(bracket_color, editor_background, 55.0); + let style = HighlightStyle { + color: Some(adjusted_color), + ..HighlightStyle::default() + }; + + editor.highlight_text_key( + HighlightKey::ColorizeBracket(accent_number), + bracket_highlights, + style, + true, + cx, + ); + } + }) + .ok(); + }); } } +fn compute_bracket_ranges( + buffer_snapshot: &BufferSnapshot, + buffer_range: Range, + fetched_chunks: &mut HashSet>, + excerpt_id: ExcerptId, + accents_count: usize, + anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option; 4]>, +) -> Vec<(usize, Vec>)> { + buffer_snapshot + .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks)) + .into_iter() + .flat_map(|(chunk_range, pairs)| { + if fetched_chunks.insert(chunk_range) { + pairs + } else { + 
Vec::new() + } + }) + .filter_map(|pair| { + let color_index = pair.color_index?; + + let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range); + let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range); + let [ + buffer_open_range_start, + buffer_open_range_end, + buffer_close_range_start, + buffer_close_range_end, + ] = anchors_in_multi_buffer( + excerpt_id, + [ + buffer_open_range.start, + buffer_open_range.end, + buffer_close_range.start, + buffer_close_range.end, + ], + )?; + let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end); + let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end); + + let mut ranges = Vec::with_capacity(2); + if let Some((open_start, open_end)) = multi_buffer_open_range { + ranges.push(open_start..open_end); + } + if let Some((close_start, close_end)) = multi_buffer_close_range { + ranges.push(close_start..close_end); + } + if ranges.is_empty() { + None + } else { + Some((color_index % accents_count, ranges)) + } + }) + .collect() +} + #[cfg(test)] mod tests { use std::{cmp, sync::Arc, time::Duration}; @@ -164,12 +219,12 @@ mod tests { }; use collections::HashSet; use fs::FakeFs; - use gpui::{AppContext as _, UpdateGlobal as _}; + use gpui::UpdateGlobal as _; use indoc::indoc; use itertools::Itertools; use language::{Capability, markdown_lang}; use languages::rust_lang; - use multi_buffer::{ExcerptRange, MultiBuffer}; + use multi_buffer::{MultiBuffer, PathKey}; use pretty_assertions::assert_eq; use project::Project; use rope::Point; @@ -749,6 +804,7 @@ mod foo «1{ }); }); }); + cx.executor().run_until_parked(); assert_eq!( &separate_with_comment_lines( indoc! {r#" @@ -776,6 +832,7 @@ mod foo { }); }); }); + cx.executor().run_until_parked(); assert_eq!( &separate_with_comment_lines( indoc! 
{r#" @@ -1239,32 +1296,34 @@ mod foo «1{ let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); let excerpt_rows = 5; let rest_of_first_except_rows = 3; - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(excerpt_rows, 0)), - ExcerptRange::new( - Point::new( - comment_lines as u32 + excerpt_rows + rest_of_first_except_rows, + Point::new(0, 0)..Point::new(excerpt_rows, 0), + Point::new( + comment_lines as u32 + excerpt_rows + rest_of_first_except_rows, + 0, + ) + ..Point::new( + comment_lines as u32 + + excerpt_rows + + rest_of_first_except_rows + + excerpt_rows, 0, - ) - ..Point::new( - comment_lines as u32 - + excerpt_rows - + rest_of_first_except_rows - + excerpt_rows, - 0, - ), - ), + ), ], + 0, cx, ); multi_buffer @@ -1291,7 +1350,7 @@ mod foo «1{ let map: Option«34»>3» = None; // a // b - + // c fn process_data_2«2()2» «2{ let other_map: Option«34»>3» = None; @@ -1331,7 +1390,7 @@ mod foo «1{ let map: Option«34»>3» = None; // a // b - + // c fn process_data_2«2()2» «2{ let other_map: Option«34»>3» = None; @@ -1381,7 +1440,7 @@ mod foo «1{ let map: Option«12»>1» = None; // a // b - + // c fn process_data_2«2()2» «2{ let other_map: Option«12»>1» = None; diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 31a1f68300d6d24ef449f4f0eb0b072f0cbe0ea9..00a48a9ab3d249850b9749d64267d8274e7eaa79 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -113,6 +113,7 @@ use unicode_segmentation::UnicodeSegmentation; use ztracing::instrument; use std::cell::RefCell; +use std::collections::hash_map::Entry; use std::{ any::TypeId, borrow::Cow, @@ 
-175,9 +176,9 @@ pub trait ToDisplayPoint { fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint; } -type TextHighlights = TreeMap>)>>; +type TextHighlights = Arc>)>>>; type SemanticTokensHighlights = - TreeMap, Arc)>; + Arc, Arc)>>; type InlayHighlights = TreeMap>; #[derive(Debug)] @@ -478,7 +479,7 @@ impl DisplayMap { diagnostics_max_severity, text_highlights: Default::default(), inlay_highlights: Default::default(), - semantic_token_highlights: TreeMap::default(), + semantic_token_highlights: Default::default(), clip_at_line_ends: false, masked: false, companion: None, @@ -788,6 +789,9 @@ impl DisplayMap { .collect(), cx, ); + for buffer_id in &other.block_snapshot.buffers_with_disabled_headers { + self.disable_header_for_buffer(*buffer_id, cx); + } } /// Creates folds for the given creases. @@ -1226,22 +1230,25 @@ impl DisplayMap { cx: &App, ) { let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - let to_insert = match self.text_highlights.remove(&key) { - Some(mut previous) if merge => match Arc::get_mut(&mut previous) { - Some((_, previous_ranges)) => { + match Arc::make_mut(&mut self.text_highlights).entry(key) { + Entry::Occupied(mut slot) => match Arc::get_mut(slot.get_mut()) { + Some((_, previous_ranges)) if merge => { previous_ranges.extend(ranges); previous_ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot)); - previous } - None => Arc::new((style, { - ranges.extend(previous.1.iter().cloned()); + Some((previous_style, previous_ranges)) => { + *previous_style = style; + *previous_ranges = ranges; + } + None if merge => { + ranges.extend(slot.get().1.iter().cloned()); ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot)); - ranges - })), + slot.insert(Arc::new((style, ranges))); + } + None => _ = slot.insert(Arc::new((style, ranges))), }, - _ => Arc::new((style, ranges)), - }; - self.text_highlights.insert(key, to_insert); + Entry::Vacant(slot) => _ = slot.insert(Arc::new((style, ranges))), + } } 
#[instrument(skip_all)] @@ -1288,14 +1295,16 @@ impl DisplayMap { } pub fn clear_highlights(&mut self, key: HighlightKey) -> bool { - let mut cleared = self.text_highlights.remove(&key).is_some(); + let mut cleared = Arc::make_mut(&mut self.text_highlights) + .remove(&key) + .is_some(); cleared |= self.inlay_highlights.remove(&key).is_some(); cleared } pub fn clear_highlights_with(&mut self, f: &mut dyn FnMut(&HighlightKey) -> bool) -> bool { let mut cleared = false; - self.text_highlights.retain(|k, _| { + Arc::make_mut(&mut self.text_highlights).retain(|k, _| { let b = !f(k); cleared |= b; b @@ -1349,7 +1358,7 @@ impl DisplayMap { widths_changed } - pub(crate) fn current_inlays(&self) -> impl Iterator { + pub(crate) fn current_inlays(&self) -> impl Iterator + Default { self.inlay_map.current_inlays() } @@ -1448,7 +1457,7 @@ impl DisplayMap { } pub fn invalidate_semantic_highlights(&mut self, buffer_id: BufferId) { - self.semantic_token_highlights.remove(&buffer_id); + Arc::make_mut(&mut self.semantic_token_highlights).remove(&buffer_id); } } @@ -1492,7 +1501,7 @@ impl<'a> HighlightedChunk<'a> { self, editor_style: &'a EditorStyle, ) -> impl Iterator + 'a { - let mut chars = self.text.chars().peekable(); + let mut chunks = self.text.graphemes(true).peekable(); let mut text = self.text; let style = self.style; let is_tab = self.is_tab; @@ -1500,10 +1509,12 @@ impl<'a> HighlightedChunk<'a> { let is_inlay = self.is_inlay; iter::from_fn(move || { let mut prefix_len = 0; - while let Some(&ch) = chars.peek() { - if !is_invisible(ch) { - prefix_len += ch.len_utf8(); - chars.next(); + while let Some(&chunk) = chunks.peek() { + let mut chars = chunk.chars(); + let Some(ch) = chars.next() else { break }; + if chunk.len() != ch.len_utf8() || !is_invisible(ch) { + prefix_len += chunk.len(); + chunks.next(); continue; } if prefix_len > 0 { @@ -1517,8 +1528,8 @@ impl<'a> HighlightedChunk<'a> { replacement: renderer.clone(), }); } - chars.next(); - let (prefix, suffix) = 
text.split_at(ch.len_utf8()); + chunks.next(); + let (prefix, suffix) = text.split_at(chunk.len()); text = suffix; if let Some(replacement) = replacement(ch) { let invisible_highlight = HighlightStyle { @@ -1908,6 +1919,9 @@ impl DisplaySnapshot { color } }), + underline: chunk_highlight + .underline + .filter(|_| editor_style.show_underlines), ..chunk_highlight } }); @@ -4118,4 +4132,35 @@ pub mod tests { assert_eq!(ranges[0].start, DisplayPoint::new(DisplayRow(0), 10)); assert_eq!(ranges[0].end, DisplayPoint::new(DisplayRow(0), 14)); } + + #[test] + fn test_highlight_invisibles_preserves_compound_emojis() { + let editor_style = EditorStyle::default(); + + let pilot_emoji = "🧑\u{200d}✈\u{fe0f}"; + let chunk = HighlightedChunk { + text: pilot_emoji, + style: None, + is_tab: false, + is_inlay: false, + replacement: None, + }; + + let chunks: Vec<_> = chunk + .highlight_invisibles(&editor_style) + .map(|chunk| chunk.text.to_string()) + .collect(); + + assert_eq!( + chunks.concat(), + pilot_emoji, + "all text bytes must be preserved" + ); + assert_eq!( + chunks.len(), + 1, + "compound emoji should not be split into multiple chunks, got: {:?}", + chunks, + ); + } } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 0073166e3d5ee8989d7c5d16112b86395fc7cebf..2673baae84ab74b2852004320cf1d94c5ed1ed42 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -78,6 +78,7 @@ pub struct BlockSnapshot { custom_blocks_by_id: TreeMap>, pub(super) buffer_header_height: u32, pub(super) excerpt_header_height: u32, + pub(super) buffers_with_disabled_headers: HashSet, } impl Deref for BlockSnapshot { @@ -265,6 +266,10 @@ impl Debug for BlockProperties

{ pub enum BlockStyle { Fixed, Flex, + /// Like `Flex` but doesn't use the gutter: + /// - block content scrolls with buffer content + /// - doesn't paint in gutter + Spacer, Sticky, } @@ -272,6 +277,7 @@ pub enum BlockStyle { pub struct EditorMargins { pub gutter: GutterDimensions, pub right: Pixels, + pub extended_right: Pixels, } #[derive(gpui::AppContext, gpui::VisualContext)] @@ -289,6 +295,7 @@ pub struct BlockContext<'a, 'b> { pub height: u32, pub selected: bool, pub editor_style: &'b EditorStyle, + pub indent_guide_padding: Pixels, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] @@ -393,8 +400,8 @@ impl Block { Block::Custom(block) => block.style, Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } - | Block::BufferHeader { .. } - | Block::Spacer { .. } => BlockStyle::Sticky, + | Block::BufferHeader { .. } => BlockStyle::Sticky, + Block::Spacer { .. } => BlockStyle::Spacer, } } @@ -651,6 +658,7 @@ impl BlockMap { custom_blocks_by_id: self.custom_blocks_by_id.clone(), buffer_header_height: self.buffer_header_height, excerpt_header_height: self.excerpt_header_height, + buffers_with_disabled_headers: self.buffers_with_disabled_headers.clone(), }, } } @@ -1702,12 +1710,13 @@ pub(crate) fn balancing_block( Some(BlockProperties { placement: their_placement, height: my_block.height, - style: BlockStyle::Sticky, + style: BlockStyle::Spacer, render: Arc::new(move |cx| { crate::EditorElement::render_spacer_block( cx.block_id, cx.height, cx.line_height, + cx.indent_guide_padding, cx.window, cx.app, ) @@ -2845,8 +2854,8 @@ mod tests { use buffer_diff::BufferDiff; use gpui::{App, AppContext as _, Element, div, font, px}; use itertools::Itertools; - use language::{Buffer, Capability}; - use multi_buffer::{ExcerptRange, MultiBuffer}; + use language::{Buffer, Capability, Point}; + use multi_buffer::{MultiBuffer, PathKey}; use rand::prelude::*; use settings::SettingsStore; use std::env; @@ -3056,27 +3065,32 @@ mod tests { let buffer2 = 
cx.new(|cx| Buffer::local("Buffer 2", cx)); let buffer3 = cx.new(|cx| Buffer::local("Buffer 3", cx)); - let mut excerpt_ids = Vec::new(); let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); - excerpt_ids.extend(multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer1.clone(), - [ExcerptRange::new(0..buffer1.read(cx).len())], + [Point::zero()..buffer1.read(cx).max_point()], + 0, cx, - )); - excerpt_ids.extend(multi_buffer.push_excerpts( + ); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer2.clone(), - [ExcerptRange::new(0..buffer2.read(cx).len())], + [Point::zero()..buffer2.read(cx).max_point()], + 0, cx, - )); - excerpt_ids.extend(multi_buffer.push_excerpts( + ); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(2), buffer3.clone(), - [ExcerptRange::new(0..buffer3.read(cx).len())], + [Point::zero()..buffer3.read(cx).max_point()], + 0, cx, - )); - + ); multi_buffer }); + let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids()); let font = test_font(); let font_size = px(14.); @@ -3403,30 +3417,32 @@ mod tests { fn test_custom_blocks_inside_buffer_folds(cx: &mut gpui::TestAppContext) { cx.update(init_test); - let text = "111\n222\n333\n444\n555\n666"; + let text = "111\n\n222\n\n333\n\n444\n\n555\n\n666"; let buffer = cx.update(|cx| { - MultiBuffer::build_multi( + let multibuffer = MultiBuffer::build_multi( [ (text, vec![Point::new(0, 0)..Point::new(0, 3)]), ( text, vec![ - Point::new(1, 0)..Point::new(1, 3), Point::new(2, 0)..Point::new(2, 3), - Point::new(3, 0)..Point::new(3, 3), + Point::new(4, 0)..Point::new(4, 3), + Point::new(6, 0)..Point::new(6, 3), ], ), ( text, vec![ - Point::new(4, 0)..Point::new(4, 3), - Point::new(5, 0)..Point::new(5, 3), + Point::new(8, 0)..Point::new(8, 3), + Point::new(10, 0)..Point::new(10, 3), ], ), ], cx, - ) + ); + assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6); + multibuffer }); let buffer_snapshot = 
cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot @@ -3462,16 +3478,16 @@ mod tests { Some(0), None, None, - Some(1), - None, Some(2), None, - Some(3), + Some(4), None, + Some(6), None, - Some(4), None, - Some(5), + Some(8), + None, + Some(10), ] ); @@ -3533,19 +3549,19 @@ mod tests { None, None, None, - Some(1), + Some(2), None, None, - Some(2), + Some(4), None, - Some(3), + Some(6), None, None, None, None, - Some(4), + Some(8), None, - Some(5), + Some(10), None, ] ); @@ -3601,19 +3617,19 @@ mod tests { None, None, None, - Some(1), + Some(2), None, None, - Some(2), + Some(4), None, - Some(3), + Some(6), None, None, None, None, - Some(4), + Some(8), None, - Some(5), + Some(10), None, ] ); @@ -3664,9 +3680,9 @@ mod tests { None, None, None, - Some(4), + Some(8), None, - Some(5), + Some(10), None, ] ); @@ -3720,9 +3736,9 @@ mod tests { None, None, None, - Some(4), + Some(8), None, - Some(5), + Some(10), None, ] ); @@ -4590,9 +4606,10 @@ mod tests { let lhs_multibuffer = cx.new(|cx| { let mut mb = MultiBuffer::new(Capability::ReadWrite); - mb.push_excerpts( + mb.set_excerpts_for_buffer( lhs_buffer.clone(), - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..lhs_buffer.read(cx).max_point()], + 0, cx, ); mb.add_inverted_diff(diff.clone(), rhs_buffer.clone(), cx); @@ -4600,9 +4617,10 @@ mod tests { }); let rhs_multibuffer = cx.new(|cx| { let mut mb = MultiBuffer::new(Capability::ReadWrite); - mb.push_excerpts( + mb.set_excerpts_for_buffer( rhs_buffer.clone(), - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..rhs_buffer.read(cx).max_point()], + 0, cx, ); mb.add_diff(diff.clone(), cx); diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 8bc66123c41a1e7af4e2814bc79ea80c97219f14..122ca6f698115c2f5e6c194246f6a378825e5675 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -17,7 +17,7 
@@ use multi_buffer::{ }; use project::InlayId; use std::{ - cmp, + cmp, iter, ops::{Add, AddAssign, Range, Sub, SubAssign}, sync::Arc, }; @@ -546,8 +546,11 @@ impl InlayMap { pub fn new(buffer: MultiBufferSnapshot) -> (Self, InlaySnapshot) { let version = 0; let snapshot = InlaySnapshot { - buffer: buffer.clone(), - transforms: SumTree::from_iter(Some(Transform::Isomorphic(buffer.text_summary())), ()), + transforms: SumTree::from_iter( + iter::once(Transform::Isomorphic(buffer.text_summary())), + (), + ), + buffer, version, }; @@ -745,7 +748,7 @@ impl InlayMap { } #[ztracing::instrument(skip_all)] - pub fn current_inlays(&self) -> impl Iterator { + pub fn current_inlays(&self) -> impl Iterator + Default { self.inlays.iter() } @@ -1328,9 +1331,10 @@ mod tests { use super::*; use crate::{ MultiBuffer, - display_map::{HighlightKey, InlayHighlights, TextHighlights}, + display_map::{HighlightKey, InlayHighlights}, hover_links::InlayHighlight, }; + use collections::HashMap; use gpui::{App, HighlightStyle}; use multi_buffer::Anchor; use project::{InlayHint, InlayHintLabel, ResolveState}; @@ -1897,7 +1901,7 @@ mod tests { ); } - let mut text_highlights = TextHighlights::default(); + let mut text_highlights = HashMap::default(); let text_highlight_count = rng.random_range(0_usize..10); let mut text_highlight_ranges = (0..text_highlight_count) .map(|_| buffer_snapshot.random_byte_range(MultiBufferOffset(0), &mut rng)) @@ -1917,6 +1921,7 @@ mod tests { .collect(), )), ); + let text_highlights = Arc::new(text_highlights); let mut inlay_highlights = InlayHighlights::default(); if !inlays.is_empty() { diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 5ff3979e7da848ddba98f5b6f8d1ea26ad990a81..187ed8614e01ddb8dcdae930fd484de9594cf63f 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -44,121 +44,146 @@ impl TabMap { mut fold_edits: Vec, tab_size: NonZeroU32, ) -> 
(TabSnapshot, Vec) { - let old_snapshot = &mut self.0; - let mut new_snapshot = TabSnapshot { - fold_snapshot, - tab_size: tab_size.min(MAX_TABS), - max_expansion_column: old_snapshot.max_expansion_column, - version: old_snapshot.version, - }; - - if old_snapshot.fold_snapshot.version != new_snapshot.fold_snapshot.version { - new_snapshot.version += 1; + let tab_size = tab_size.min(MAX_TABS); + + if self.0.tab_size != tab_size { + let old_max_point = self.0.max_point(); + self.0.version += 1; + self.0.fold_snapshot = fold_snapshot; + self.0.tab_size = tab_size; + return ( + self.0.clone(), + vec![TabEdit { + old: TabPoint::zero()..old_max_point, + new: TabPoint::zero()..self.0.max_point(), + }], + ); } - let tab_edits = if old_snapshot.tab_size == new_snapshot.tab_size { - // Expand each edit to include the next tab on the same line as the edit, - // and any subsequent tabs on that line that moved across the tab expansion - // boundary. - for fold_edit in &mut fold_edits { - let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); - let old_end_row_successor_offset = cmp::min( - FoldPoint::new(old_end.row() + 1, 0), - old_snapshot.fold_snapshot.max_point(), - ) - .to_offset(&old_snapshot.fold_snapshot); - let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + let old_snapshot = &mut self.0; + let mut new_version = old_snapshot.version; + if old_snapshot.fold_snapshot.version != fold_snapshot.version { + new_version += 1; + } - let mut offset_from_edit = 0; - let mut first_tab_offset = None; - let mut last_tab_with_changed_expansion_offset = None; - 'outer: for chunk in old_snapshot.fold_snapshot.chunks( - fold_edit.old.end..old_end_row_successor_offset, - false, - Highlights::default(), - ) { - let mut remaining_tabs = chunk.tabs; - while remaining_tabs != 0 { - let ix = remaining_tabs.trailing_zeros(); - let offset_from_edit = offset_from_edit + ix; - if first_tab_offset.is_none() { - first_tab_offset = Some(offset_from_edit); - } - 
- let old_column = old_end.column() + offset_from_edit; - let new_column = new_end.column() + offset_from_edit; - let was_expanded = old_column < old_snapshot.max_expansion_column; - let is_expanded = new_column < new_snapshot.max_expansion_column; - if was_expanded != is_expanded { - last_tab_with_changed_expansion_offset = Some(offset_from_edit); - } else if !was_expanded && !is_expanded { - break 'outer; - } - - remaining_tabs &= remaining_tabs - 1; + if fold_edits.is_empty() { + old_snapshot.version = new_version; + old_snapshot.fold_snapshot = fold_snapshot; + old_snapshot.tab_size = tab_size; + return (old_snapshot.clone(), vec![]); + } + // Expand each edit to include the next tab on the same line as the edit, + // and any subsequent tabs on that line that moved across the tab expansion + // boundary. + // + // This is necessary because a tab's display width depends on its column + // position: it expands to fill up to the next tab stop. When an edit + // shifts text on a line, any tab character after the edit may now render + // at a different width even though the tab byte itself wasn't touched. + // Additionally, tabs beyond `max_expansion_column` are rendered as a + // single space instead of expanding to the next tab stop. An edit that + // shifts a tab across that boundary changes its display width, so the + // edit must cover it. We scan forward from the edit end to the end of + // the line, extending the edit to include the first subsequent tab (whose + // rendered width may have changed) and the last tab that crossed the + // expansion boundary (transitioning between expanded and non-expanded). 
+ for fold_edit in &mut fold_edits { + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let old_end_row_successor_offset = cmp::min( + FoldPoint::new(old_end.row() + 1, 0), + old_snapshot.fold_snapshot.max_point(), + ) + .to_offset(&old_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&fold_snapshot); + + let mut offset_from_edit = 0; + let mut first_tab_offset = None; + let mut last_tab_with_changed_expansion_offset = None; + 'outer: for chunk in old_snapshot.fold_snapshot.chunks( + fold_edit.old.end..old_end_row_successor_offset, + false, + Highlights::default(), + ) { + let mut remaining_tabs = chunk.tabs; + while remaining_tabs != 0 { + let ix = remaining_tabs.trailing_zeros(); + let offset_from_edit = offset_from_edit + ix; + if first_tab_offset.is_none() { + first_tab_offset = Some(offset_from_edit); } - offset_from_edit += chunk.text.len() as u32; - if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column - && new_end.column() + offset_from_edit >= new_snapshot.max_expansion_column - { - break; + let old_column = old_end.column() + offset_from_edit; + let new_column = new_end.column() + offset_from_edit; + let was_expanded = old_column < old_snapshot.max_expansion_column; + let is_expanded = new_column < old_snapshot.max_expansion_column; + if was_expanded != is_expanded { + last_tab_with_changed_expansion_offset = Some(offset_from_edit); + } else if !was_expanded && !is_expanded { + break 'outer; } + + remaining_tabs &= remaining_tabs - 1; } - if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) { - fold_edit.old.end.0 += offset as usize + 1; - fold_edit.new.end.0 += offset as usize + 1; + offset_from_edit += chunk.text.len() as u32; + if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column + && new_end.column() + offset_from_edit >= old_snapshot.max_expansion_column + { + break; } } - let _old_alloc_ptr = fold_edits.as_ptr(); - // Combine any 
edits that overlap due to the expansion. - let mut fold_edits = fold_edits.into_iter(); - if let Some(mut first_edit) = fold_edits.next() { - // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them. - #[allow(clippy::filter_map_identity)] - let mut v: Vec<_> = fold_edits - .scan(&mut first_edit, |state, edit| { - if state.old.end >= edit.old.start { - state.old.end = edit.old.end; - state.new.end = edit.new.end; - Some(None) // Skip this edit, it's merged - } else { - let new_state = edit; - let result = Some(Some(state.clone())); // Yield the previous edit - **state = new_state; - result - } - }) - .filter_map(|x| x) - .collect(); - v.push(first_edit); - debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated"); - v.into_iter() - .map(|fold_edit| { - let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); - let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); - let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); - let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); - TabEdit { - old: old_snapshot.fold_point_to_tab_point(old_start) - ..old_snapshot.fold_point_to_tab_point(old_end), - new: new_snapshot.fold_point_to_tab_point(new_start) - ..new_snapshot.fold_point_to_tab_point(new_end), - } - }) - .collect() - } else { - vec![] + if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) { + fold_edit.old.end.0 += offset as usize + 1; + fold_edit.new.end.0 += offset as usize + 1; } - } else { - new_snapshot.version += 1; - vec![TabEdit { - old: TabPoint::zero()..old_snapshot.max_point(), - new: TabPoint::zero()..new_snapshot.max_point(), - }] + } + + let new_snapshot = TabSnapshot { + fold_snapshot, + tab_size, + max_expansion_column: old_snapshot.max_expansion_column, + version: new_version, }; + + let _old_alloc_ptr = fold_edits.as_ptr(); + // Combine any edits that overlap due to the 
expansion. + let mut fold_edits = fold_edits.into_iter(); + let mut first_edit = fold_edits.next().unwrap(); + // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them. + #[allow(clippy::filter_map_identity)] + let mut v: Vec<_> = fold_edits + .scan(&mut first_edit, |state, edit| { + if state.old.end >= edit.old.start { + state.old.end = edit.old.end; + state.new.end = edit.new.end; + Some(None) // Skip this edit, it's merged + } else { + let new_state = edit; + let result = Some(Some(state.clone())); // Yield the previous edit + **state = new_state; + result + } + }) + .filter_map(|x| x) + .collect(); + v.push(first_edit); + debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated"); + let tab_edits = v + .into_iter() + .map(|fold_edit| { + let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + TabEdit { + old: old_snapshot.fold_point_to_tab_point(old_start) + ..old_snapshot.fold_point_to_tab_point(old_end), + new: new_snapshot.fold_point_to_tab_point(new_start) + ..new_snapshot.fold_point_to_tab_point(new_end), + } + }) + .collect(); *old_snapshot = new_snapshot; (old_snapshot.clone(), tab_edits) } @@ -168,6 +193,8 @@ impl TabMap { pub struct TabSnapshot { pub fold_snapshot: FoldSnapshot, pub tab_size: NonZeroU32, + /// The maximum column up to which a tab can expand. + /// Any tab after this column will not expand. 
pub max_expansion_column: u32, pub version: usize, } @@ -365,10 +392,11 @@ impl TabSnapshot { } #[ztracing::instrument(skip_all)] - fn expand_tabs<'a, I>(&self, mut cursor: TabStopCursor<'a, I>, column: u32) -> u32 - where - I: Iterator>, - { + fn expand_tabs<'a>(&self, mut cursor: TabStopCursor<'a>, column: u32) -> u32 { + // we only ever act on a single row at a time + // so the main difference is that other layers build a transform sumtree, and can then just run through that + // we cant quite do this here, as we need to work with the previous layer chunk to understand the tabs of the corresponding row + // we can still do forward searches for this though, we search for a row, then traverse the column up to where we need to be let tab_size = self.tab_size.get(); let end_column = column.min(self.max_expansion_column); @@ -376,7 +404,7 @@ impl TabSnapshot { let mut tab_count = 0; let mut expanded_tab_len = 0; - while let Some(tab_stop) = cursor.seek(seek_target) { + while let Some(tab_stop) = cursor.seek_forward(seek_target) { let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count; let tab_len = tab_size - ((expanded_chars_old - 1) % tab_size); tab_count += 1; @@ -399,22 +427,19 @@ impl TabSnapshot { } #[ztracing::instrument(skip_all)] - fn collapse_tabs<'a, I>( + fn collapse_tabs<'a>( &self, - mut cursor: TabStopCursor<'a, I>, + mut cursor: TabStopCursor<'a>, column: u32, bias: Bias, - ) -> (u32, u32, u32) - where - I: Iterator>, - { + ) -> (u32, u32, u32) { let tab_size = self.tab_size.get(); let mut collapsed_column = column; let mut seek_target = column.min(self.max_expansion_column); let mut tab_count = 0; let mut expanded_tab_len = 0; - while let Some(tab_stop) = cursor.seek(seek_target) { + while let Some(tab_stop) = cursor.seek_forward(seek_target) { // Calculate how much we want to expand this tab stop (into spaces) let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count; let tab_len = tab_size - 
((expanded_chars_old - 1) % tab_size); @@ -617,13 +642,7 @@ impl<'a> Iterator for TabChunks<'a> { } } - let first_tab_ix = if self.chunk.tabs != 0 { - self.chunk.tabs.trailing_zeros() as usize - } else { - self.chunk.text.len() - }; - - if first_tab_ix == 0 { + if self.chunk.tabs & 1 != 0 { self.chunk.text = &self.chunk.text[1..]; self.chunk.tabs >>= 1; self.chunk.chars >>= 1; @@ -654,12 +673,46 @@ impl<'a> Iterator for TabChunks<'a> { }); } - let prefix_len = first_tab_ix; + // Fast path: no tabs in the remaining chunk, return it directly + if self.chunk.tabs == 0 { + let chunk = self.chunk.clone(); + self.chunk.text = ""; + self.chunk.tabs = 0; + self.chunk.chars = 0; + self.chunk.newlines = 0; + let chunk_len = chunk.text.len() as u32; + + let newline_count = chunk.newlines.count_ones(); + if newline_count > 0 { + let last_newline_bit = 128 - chunk.newlines.leading_zeros(); + let chars_after_last_newline = + chunk.chars.unbounded_shr(last_newline_bit).count_ones(); + let bytes_after_last_newline = chunk_len - last_newline_bit; + + self.column = chars_after_last_newline; + self.input_column = bytes_after_last_newline; + self.output_position = Point::new( + self.output_position.row + newline_count, + bytes_after_last_newline, + ); + } else { + let char_count = chunk.chars.count_ones(); + self.column += char_count; + if !self.inside_leading_tab { + self.input_column += chunk_len; + } + self.output_position.column += chunk_len; + } + + return Some(chunk); + } + + // Split at the next tab position + let prefix_len = self.chunk.tabs.trailing_zeros() as usize; let (prefix, suffix) = self.chunk.text.split_at(prefix_len); let mask = 1u128.unbounded_shl(prefix_len as u32).wrapping_sub(1); let prefix_chars = self.chunk.chars & mask; - let prefix_tabs = self.chunk.tabs & mask; let prefix_newlines = self.chunk.newlines & mask; self.chunk.text = suffix; @@ -692,13 +745,156 @@ impl<'a> Iterator for TabChunks<'a> { Some(Chunk { text: prefix, chars: prefix_chars, - tabs: 
prefix_tabs, + tabs: 0, newlines: prefix_newlines, ..self.chunk.clone() }) } } +struct TabStopCursor<'a> { + chunks: FoldChunks<'a>, + byte_offset: u32, + char_offset: u32, + /// Chunk + /// last tab position iterated through + current_chunk: Option<(TabStopChunk<'a>, u32)>, +} + +struct TabStopChunk<'a> { + chars: u128, + text: &'a str, + tabs: u128, +} + +impl<'a> TabStopCursor<'a> { + fn new(chunks: FoldChunks<'a>) -> Self { + Self { + chunks, + byte_offset: 0, + char_offset: 0, + current_chunk: None, + } + } + + fn bytes_until_next_char(&self) -> Option { + self.current_chunk.as_ref().map(|&(ref chunk, idx)| { + let higher_chars = chunk.chars.unbounded_shr(idx + 1); + + if higher_chars != 0 { + higher_chars.trailing_zeros() as usize + 1 + } else { + chunk.text.len() - idx as usize + } + }) + } + + fn is_char_boundary(&self) -> bool { + self.current_chunk + .as_ref() + .is_some_and(|&(ref chunk, idx)| { + (1u128.unbounded_shl(idx) & chunk.chars) != 0 || idx as usize == chunk.text.len() + }) + } + + /// distance: length to move forward while searching for the next tab stop + #[ztracing::instrument(skip_all)] + fn seek_forward(&mut self, distance: u32) -> Option { + if distance == 0 { + return None; + } + + let mut distance_remaining = distance; + + while let Some((mut chunk, chunk_position)) = self.current_chunk.take().or_else(|| { + self.chunks.next().map(|chunk| { + ( + TabStopChunk { + chars: chunk.chars, + text: chunk.text, + tabs: chunk.tabs, + }, + 0, + ) + }) + }) { + let chunk_len = chunk.text.len() as u32; + + if chunk.tabs == 0 { + let chunk_remaining = chunk_len - chunk_position; + if chunk_remaining >= distance_remaining { + let end = chunk_position + distance_remaining; + self.byte_offset += distance_remaining; + self.char_offset += + count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars); + if end < 128 { + self.current_chunk = Some((chunk, end)); + } + return None; + } + + self.byte_offset += chunk_remaining; + self.char_offset += + 
count_chars_in_byte_range(chunk_position..(chunk_len - 1), chunk.chars); + distance_remaining -= chunk_remaining; + continue; + } + + let tab_end = chunk.tabs.trailing_zeros() + 1; + let bytes_to_tab = tab_end - chunk_position; + + if bytes_to_tab > distance_remaining { + let end = chunk_position + distance_remaining; + self.byte_offset += distance_remaining; + self.char_offset += + count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars); + self.current_chunk = Some((chunk, end)); + return None; + } + + self.byte_offset += bytes_to_tab; + self.char_offset += + count_chars_in_byte_range(chunk_position..(tab_end - 1), chunk.chars); + + let tabstop = TabStop { + char_offset: self.char_offset, + byte_offset: self.byte_offset, + }; + + chunk.tabs = (chunk.tabs - 1) & chunk.tabs; + + if tab_end != chunk_len { + self.current_chunk = Some((chunk, tab_end)); + } + + return Some(tabstop); + } + + None + } + + fn byte_offset(&self) -> u32 { + self.byte_offset + } + + fn char_offset(&self) -> u32 { + self.char_offset + } +} + +#[inline(always)] +fn count_chars_in_byte_range(range: Range, bitmap: u128) -> u32 { + let low_mask = u128::MAX << range.start; + let high_mask = u128::MAX >> (127 - range.end); + (bitmap & low_mask & high_mask).count_ones() +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +struct TabStop { + char_offset: u32, + byte_offset: u32, +} + #[cfg(test)] mod tests { use std::mem; @@ -814,40 +1010,21 @@ mod tests { #[gpui::test] fn test_expand_tabs(cx: &mut gpui::App) { - let test_values = [ - ("κg🏀 f\nwo🏀❌by🍐❎β🍗c\tβ❎ \ncλ🎉", 17), - (" \twςe", 4), - ("fε", 1), - ("i❎\t", 3), - ]; - let buffer = MultiBuffer::build_simple("", cx); + let input = "A\tBC\tDEF\tG\tHI\tJ\tK\tL\tM"; + + let buffer = MultiBuffer::build_simple(input, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 
4.try_into().unwrap()); - for (text, column) in test_values { - let mut tabs = 0u128; - let mut chars = 0u128; - for (idx, c) in text.char_indices() { - if c == '\t' { - tabs |= 1 << idx; - } - chars |= 1 << idx; - } - - let chunks = [Chunk { - text, - tabs, - chars, - ..Default::default() - }]; - - let cursor = TabStopCursor::new(chunks); + for (ix, _) in input.char_indices() { + let fold_point = FoldPoint::new(0, ix as u32); assert_eq!( - tab_snapshot.expected_expand_tabs(text.chars(), column), - tab_snapshot.expand_tabs(cursor, column) + tab_snapshot.expected_to_tab_point(fold_point), + tab_snapshot.fold_point_to_tab_point(fold_point), + "Failed with fold_point at column {ix}" ); } } @@ -1263,7 +1440,7 @@ mod tests { Default::default(), ); let mut cursor = TabStopCursor::new(chunks); - assert!(cursor.seek(0).is_none()); + assert!(cursor.seek_forward(0).is_none()); let mut tab_stops = Vec::new(); let mut all_tab_stops = Vec::new(); @@ -1279,7 +1456,7 @@ mod tests { } } - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { tab_stops.push(tab_stop); } pretty_assertions::assert_eq!(tab_stops.as_slice(), all_tab_stops.as_slice(),); @@ -1314,7 +1491,7 @@ mod tests { } } - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { actual_tab_stops.push(tab_stop); } pretty_assertions::assert_eq!(actual_tab_stops.as_slice(), expected_tab_stops.as_slice(),); @@ -1379,7 +1556,7 @@ mod tests { let mut found_tab_stops = Vec::new(); let mut position = distance; - while let Some(tab_stop) = cursor.seek(position) { + while let Some(tab_stop) = cursor.seek_forward(position) { found_tab_stops.push(tab_stop); position = distance - tab_stop.byte_offset; } @@ -1425,7 +1602,7 @@ mod tests { Default::default(), ); let mut cursor = TabStopCursor::new(chunks); - assert!(cursor.seek(0).is_none()); + assert!(cursor.seek_forward(0).is_none()); let mut expected_tab_stops = 
Vec::new(); let mut byte_offset = 0; @@ -1441,7 +1618,7 @@ mod tests { } let mut actual_tab_stops = Vec::new(); - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { actual_tab_stops.push(tab_stop); } @@ -1487,7 +1664,7 @@ mod tests { let mut found_tab_stops = Vec::new(); let mut position = distance; - while let Some(tab_stop) = cursor.seek(position) { + while let Some(tab_stop) = cursor.seek_forward(position) { found_tab_stops.push(tab_stop); position = distance - tab_stop.byte_offset; } @@ -1520,165 +1697,3 @@ mod tests { } } } - -struct TabStopCursor<'a, I> -where - I: Iterator>, -{ - chunks: I, - byte_offset: u32, - char_offset: u32, - /// Chunk - /// last tab position iterated through - current_chunk: Option<(Chunk<'a>, u32)>, -} - -impl<'a, I> TabStopCursor<'a, I> -where - I: Iterator>, -{ - #[ztracing::instrument(skip_all)] - fn new(chunks: impl IntoIterator, IntoIter = I>) -> Self { - Self { - chunks: chunks.into_iter(), - byte_offset: 0, - char_offset: 0, - current_chunk: None, - } - } - - #[ztracing::instrument(skip_all)] - fn bytes_until_next_char(&self) -> Option { - self.current_chunk.as_ref().and_then(|(chunk, idx)| { - let mut idx = *idx; - let mut diff = 0; - while idx > 0 && chunk.chars & (1u128.unbounded_shl(idx)) == 0 { - idx -= 1; - diff += 1; - } - - if chunk.chars & (1 << idx) != 0 { - Some( - (chunk.text[idx as usize..].chars().next()?) 
- .len_utf8() - .saturating_sub(diff), - ) - } else { - None - } - }) - } - - #[ztracing::instrument(skip_all)] - fn is_char_boundary(&self) -> bool { - self.current_chunk - .as_ref() - .is_some_and(|(chunk, idx)| (chunk.chars & 1u128.unbounded_shl(*idx)) != 0) - } - - /// distance: length to move forward while searching for the next tab stop - #[ztracing::instrument(skip_all)] - fn seek(&mut self, distance: u32) -> Option { - if distance == 0 { - return None; - } - - let mut distance_traversed = 0; - - while let Some((mut chunk, chunk_position)) = self - .current_chunk - .take() - .or_else(|| self.chunks.next().zip(Some(0))) - { - if chunk.tabs == 0 { - let chunk_distance = chunk.text.len() as u32 - chunk_position; - if chunk_distance + distance_traversed >= distance { - let overshoot = distance_traversed.abs_diff(distance); - - self.byte_offset += overshoot; - self.char_offset += get_char_offset( - chunk_position..(chunk_position + overshoot).saturating_sub(1), - chunk.chars, - ); - - if chunk_position + overshoot < 128 { - self.current_chunk = Some((chunk, chunk_position + overshoot)); - } - - return None; - } - - self.byte_offset += chunk_distance; - self.char_offset += get_char_offset( - chunk_position..(chunk_position + chunk_distance).saturating_sub(1), - chunk.chars, - ); - distance_traversed += chunk_distance; - continue; - } - let tab_position = chunk.tabs.trailing_zeros() + 1; - - if distance_traversed + tab_position - chunk_position > distance { - let cursor_position = distance_traversed.abs_diff(distance); - - self.char_offset += get_char_offset( - chunk_position..(chunk_position + cursor_position - 1), - chunk.chars, - ); - self.current_chunk = Some((chunk, cursor_position + chunk_position)); - self.byte_offset += cursor_position; - - return None; - } - - self.byte_offset += tab_position - chunk_position; - self.char_offset += get_char_offset(chunk_position..(tab_position - 1), chunk.chars); - - let tabstop = TabStop { - char_offset: self.char_offset, 
- byte_offset: self.byte_offset, - }; - - chunk.tabs = (chunk.tabs - 1) & chunk.tabs; - - if tab_position as usize != chunk.text.len() { - self.current_chunk = Some((chunk, tab_position)); - } - - return Some(tabstop); - } - - None - } - - fn byte_offset(&self) -> u32 { - self.byte_offset - } - - fn char_offset(&self) -> u32 { - self.char_offset - } -} - -#[inline(always)] -fn get_char_offset(range: Range, bit_map: u128) -> u32 { - if range.start == range.end { - return if (1u128 << range.start) & bit_map == 0 { - 0 - } else { - 1 - }; - } - let end_shift: u128 = 127u128 - range.end as u128; - let mut bit_mask = (u128::MAX >> range.start) << range.start; - bit_mask = (bit_mask << end_shift) >> end_shift; - let bit_map = bit_map & bit_mask; - - bit_map.count_ones() -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -struct TabStop { - char_offset: u32, - byte_offset: u32, -} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3e734fdf1ab8254807a65c96bb98a0f804bc4dc4..94c7bb06eb98f56e05ff96bd3b64d96d2397730b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -237,6 +237,7 @@ use crate::{ }; pub const FILE_HEADER_HEIGHT: u32 = 2; +pub const BUFFER_HEADER_PADDING: Rems = rems(0.25); pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); const MAX_LINE_LEN: usize = 1024; @@ -1346,7 +1347,7 @@ pub struct Editor { suppress_selection_callback: bool, applicable_language_settings: HashMap, LanguageSettings>, accent_data: Option, - fetched_tree_sitter_chunks: HashMap>>, + bracket_fetched_tree_sitter_chunks: HashMap>>, semantic_token_state: SemanticTokenState, pub(crate) refresh_matching_bracket_highlights_task: Task<()>, refresh_document_symbols_task: Shared>, @@ -1355,6 +1356,7 @@ pub struct Editor { outline_symbols_at_cursor: Option<(BufferId, Vec>)>, sticky_headers_task: Task<()>, sticky_headers: Option>>, + pub(crate) colorize_brackets_task: Task<()>, } 
#[derive(Debug, PartialEq)] @@ -1971,6 +1973,8 @@ impl Editor { .clone_state(&self.scroll_manager, &my_snapshot, &clone_snapshot, cx); clone.searchable = self.searchable; clone.read_only = self.read_only; + clone.buffers_with_disabled_indent_guides = + self.buffers_with_disabled_indent_guides.clone(); clone } @@ -2397,7 +2401,9 @@ impl Editor { diagnostics_max_severity, hard_wrap: None, completion_provider: project.clone().map(|project| Rc::new(project) as _), - semantics_provider: project.clone().map(|project| Rc::new(project) as _), + semantics_provider: project + .as_ref() + .map(|project| Rc::new(project.downgrade()) as _), collaboration_hub: project.clone().map(|project| Box::new(project) as _), project, blink_manager: blink_manager.clone(), @@ -2597,7 +2603,7 @@ impl Editor { applicable_language_settings: HashMap::default(), semantic_token_state: SemanticTokenState::new(cx, full_mode), accent_data: None, - fetched_tree_sitter_chunks: HashMap::default(), + bracket_fetched_tree_sitter_chunks: HashMap::default(), number_deleted_lines: false, refresh_matching_bracket_highlights_task: Task::ready(()), refresh_document_symbols_task: Task::ready(()).shared(), @@ -2606,6 +2612,7 @@ impl Editor { outline_symbols_at_cursor: None, sticky_headers_task: Task::ready(()), sticky_headers: None, + colorize_brackets_task: Task::ready(()), }; if is_minimap { @@ -3625,7 +3632,7 @@ impl Editor { self.refresh_document_highlights(cx); refresh_linked_ranges(self, window, cx); - self.refresh_selected_text_highlights(false, window, cx); + self.refresh_selected_text_highlights(&display_map, false, window, cx); self.refresh_matching_bracket_highlights(&display_map, cx); self.refresh_outline_symbols_at_cursor(cx); self.update_visible_edit_prediction(window, cx); @@ -4893,8 +4900,10 @@ impl Editor { .scope_context(Some(CharScopeContext::LinkedEdit)); classifier.is_word(char) }); + let is_dot = text.as_ref() == "."; + let should_apply_linked_edit = is_word_char || is_dot; - if is_word_char 
{ + if should_apply_linked_edit { let anchor_range = start_anchor.text_anchor..anchor.text_anchor; linked_edits.push(&self, anchor_range, text.clone(), cx); } else { @@ -5079,6 +5088,10 @@ impl Editor { } pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = { @@ -5207,29 +5220,48 @@ impl Editor { extra_line_additional_indent, prevent_auto_indent, } => { + let auto_indent_mode = + buffer.language_settings_at(start, cx).auto_indent; + let preserve_indent = + auto_indent_mode != language::AutoIndentMode::None; + let apply_syntax_indent = + auto_indent_mode == language::AutoIndentMode::SyntaxAware; let capacity_for_delimiter = delimiter.as_deref().map(str::len).unwrap_or_default(); + let existing_indent_len = if preserve_indent { + existing_indent.len as usize + } else { + 0 + }; let extra_line_len = extra_line_additional_indent - .map(|i| 1 + existing_indent.len as usize + i.len as usize) + .map(|i| 1 + existing_indent_len + i.len as usize) .unwrap_or(0); let mut new_text = String::with_capacity( 1 + capacity_for_delimiter - + existing_indent.len as usize + + existing_indent_len + additional_indent.len as usize + extra_line_len, ); new_text.push('\n'); - new_text.extend(existing_indent.chars()); + if preserve_indent { + new_text.extend(existing_indent.chars()); + } new_text.extend(additional_indent.chars()); if let Some(delimiter) = &delimiter { new_text.push_str(delimiter); } if let Some(extra_indent) = extra_line_additional_indent { new_text.push('\n'); - new_text.extend(existing_indent.chars()); + if preserve_indent { + new_text.extend(existing_indent.chars()); + } new_text.extend(extra_indent.chars()); } - (start, new_text, *prevent_auto_indent) + ( + start, + new_text, + *prevent_auto_indent || !apply_syntax_indent, + ) } }; @@ 
-5281,6 +5313,10 @@ impl Editor { } pub fn newline_above(&mut self, _: &NewlineAbove, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let buffer = self.buffer.read(cx); @@ -5348,6 +5384,10 @@ impl Editor { } pub fn newline_below(&mut self, _: &NewlineBelow, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } + self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); let mut buffer_edits: HashMap, Vec)> = HashMap::default(); @@ -7499,7 +7539,7 @@ impl Editor { fn prepare_highlight_query_from_selection( &mut self, - window: &Window, + snapshot: &DisplaySnapshot, cx: &mut Context, ) -> Option<(String, Range)> { if matches!(self.mode, EditorMode::SingleLine) { @@ -7511,7 +7551,6 @@ impl Editor { if self.selections.count() != 1 || self.selections.line_mode() { return None; } - let snapshot = self.snapshot(window, cx); let selection = self.selections.newest::(&snapshot); // If the selection spans multiple rows OR it is empty if selection.start.row != selection.end.row @@ -7533,6 +7572,7 @@ impl Editor { #[ztracing::instrument(skip_all)] fn update_selection_occurrence_highlights( &mut self, + multi_buffer_snapshot: MultiBufferSnapshot, query_text: String, query_range: Range, multi_buffer_range_to_query: Range, @@ -7540,7 +7580,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Task<()> { - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); cx.spawn_in(window, async move |editor, cx| { if use_debounce { cx.background_executor() @@ -7556,7 +7595,7 @@ impl Editor { .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); let mut match_ranges = Vec::new(); let Ok(regex) = project::search::SearchQuery::text( - query_text.clone(), + query_text, false, false, false, @@ -7718,12 +7757,13 @@ impl Editor { #[ztracing::instrument(skip_all)] fn refresh_selected_text_highlights( &mut self, + snapshot: 
&DisplaySnapshot, on_buffer_edit: bool, window: &mut Window, cx: &mut Context, ) { let Some((query_text, query_range)) = - self.prepare_highlight_query_from_selection(window, cx) + self.prepare_highlight_query_from_selection(snapshot, cx) else { self.clear_background_highlights(HighlightKey::SelectedTextHighlight, cx); self.quick_selection_highlight_task.take(); @@ -7755,6 +7795,7 @@ impl Editor { self.quick_selection_highlight_task = Some(( query_range.clone(), self.update_selection_occurrence_highlights( + snapshot.buffer.clone(), query_text.clone(), query_range.clone(), multi_buffer_visible_range, @@ -7780,6 +7821,7 @@ impl Editor { self.debounced_selection_highlight_task = Some(( query_range.clone(), self.update_selection_occurrence_highlights( + snapshot.buffer.clone(), query_text, query_range, multi_buffer_full_range, @@ -9888,7 +9930,14 @@ impl Editor { origin.x -= BORDER_WIDTH; - window.defer_draw(element, origin, 1); + window.with_content_mask( + Some(gpui::ContentMask { + bounds: *text_bounds, + }), + |window| { + window.defer_draw(element, origin, 1, Some(window.content_mask())); + }, + ); // Do not return an element, since it will already be drawn due to defer_draw. None @@ -11307,6 +11356,15 @@ impl Editor { // would do nothing for single line selections individual cursors. let end = if selection.start.row == selection.end.row { MultiBufferRow(selection.start.row + 1) + } else if selection.end.column == 0 { + // If the selection ends at the start of a line, it's logically at the end of the + // previous line (plus its newline). + // Don't include the end line unless there's only one line selected. 
+ if selection.start.row + 1 == selection.end.row { + MultiBufferRow(selection.end.row) + } else { + MultiBufferRow(selection.end.row - 1) + } } else { MultiBufferRow(selection.end.row) }; @@ -13664,94 +13722,94 @@ impl Editor { let selections = self.selections.all::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let mut text = String::new(); - let mut clipboard_selections = Vec::with_capacity(selections.len()); - { - let max_point = buffer.max_point(); - let mut is_first = true; - let mut prev_selection_was_entire_line = false; - for selection in &selections { - let mut start = selection.start; - let mut end = selection.end; - let is_entire_line = selection.is_empty() || self.selections.line_mode(); - let mut add_trailing_newline = false; - if is_entire_line { - start = Point::new(start.row, 0); - let next_line_start = Point::new(end.row + 1, 0); - if next_line_start <= max_point { - end = next_line_start; - } else { - // We're on the last line without a trailing newline. - // Copy to the end of the line and add a newline afterwards. - end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row))); - add_trailing_newline = true; - } + + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in &selections { + let mut start = selection.start; + let mut end = selection.end; + let is_entire_line = selection.is_empty() || self.selections.line_mode(); + let mut add_trailing_newline = false; + if is_entire_line { + start = Point::new(start.row, 0); + let next_line_start = Point::new(end.row + 1, 0); + if next_line_start <= max_point { + end = next_line_start; + } else { + // We're on the last line without a trailing newline. + // Copy to the end of the line and add a newline afterwards. 
+ end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row))); + add_trailing_newline = true; } + } - let mut trimmed_selections = Vec::new(); - if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { - let row = MultiBufferRow(start.row); - let first_indent = buffer.indent_size_for_line(row); - if first_indent.len == 0 || start.column > first_indent.len { - trimmed_selections.push(start..end); - } else { - trimmed_selections.push( - Point::new(row.0, first_indent.len) - ..Point::new(row.0, buffer.line_len(row)), - ); - for row in start.row + 1..=end.row { - let mut line_len = buffer.line_len(MultiBufferRow(row)); - if row == end.row { - line_len = end.column; - } - if line_len == 0 { - trimmed_selections - .push(Point::new(row, 0)..Point::new(row, line_len)); - continue; - } - let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - if row_indent_size.len >= first_indent.len { - trimmed_selections.push( - Point::new(row, first_indent.len)..Point::new(row, line_len), - ); - } else { - trimmed_selections.clear(); - trimmed_selections.push(start..end); - break; - } + let mut trimmed_selections = Vec::new(); + if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { + let row = MultiBufferRow(start.row); + let first_indent = buffer.indent_size_for_line(row); + if first_indent.len == 0 || start.column > first_indent.len { + trimmed_selections.push(start..end); + } else { + trimmed_selections.push( + Point::new(row.0, first_indent.len) + ..Point::new(row.0, buffer.line_len(row)), + ); + for row in start.row + 1..=end.row { + let mut line_len = buffer.line_len(MultiBufferRow(row)); + if row == end.row { + line_len = end.column; + } + if line_len == 0 { + trimmed_selections.push(Point::new(row, 0)..Point::new(row, line_len)); + continue; + } + let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); + if row_indent_size.len >= first_indent.len { + trimmed_selections + .push(Point::new(row, 
first_indent.len)..Point::new(row, line_len)); + } else { + trimmed_selections.clear(); + trimmed_selections.push(start..end); + break; } } - } else { - trimmed_selections.push(start..end); } + } else { + trimmed_selections.push(start..end); + } - let is_multiline_trim = trimmed_selections.len() > 1; - for trimmed_range in trimmed_selections { - if is_first { - is_first = false; - } else if is_multiline_trim || !prev_selection_was_entire_line { - text += "\n"; - } - prev_selection_was_entire_line = is_entire_line && !is_multiline_trim; - let mut len = 0; - for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { - text.push_str(chunk); - len += chunk.len(); - } - if add_trailing_newline { - text.push('\n'); - len += 1; + let is_multiline_trim = trimmed_selections.len() > 1; + let mut selection_len: usize = 0; + let prev_selection_was_entire_line = is_entire_line && !is_multiline_trim; + + for trimmed_range in trimmed_selections { + if is_first { + is_first = false; + } else if is_multiline_trim || !prev_selection_was_entire_line { + text.push('\n'); + if is_multiline_trim { + selection_len += 1; } - clipboard_selections.push(ClipboardSelection::for_buffer( - len, - is_entire_line, - trimmed_range, - &buffer, - self.project.as_ref(), - cx, - )); + } + for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { + text.push_str(chunk); + selection_len += chunk.len(); + } + if add_trailing_newline { + text.push('\n'); + selection_len += 1; } } + + clipboard_selections.push(ClipboardSelection::for_buffer( + selection_len, + is_entire_line, + start..end, + &buffer, + self.project.as_ref(), + cx, + )); } cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( @@ -15340,7 +15398,7 @@ impl Editor { pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - 
s.select_ranges(vec![Anchor::min()..Anchor::max()]); + s.select_ranges([Anchor::min()..Anchor::max()]); }); } @@ -15372,7 +15430,7 @@ impl Editor { .into_iter() .map(|selection| selection.start..selection.end) .collect::>(); - self.unfold_ranges(&selections, true, true, cx); + self.unfold_ranges(&selections, true, false, cx); let mut new_selection_ranges = Vec::new(); { @@ -15414,7 +15472,7 @@ impl Editor { } } } - self.change_selections(Default::default(), window, cx, |s| { + self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges(new_selection_ranges); }); } @@ -23932,7 +23990,7 @@ impl Editor { } pub fn refresh_inline_values(&mut self, cx: &mut Context) { - let Some(project) = self.project.clone() else { + let Some(semantics) = self.semantics_provider.clone() else { return; }; @@ -23967,7 +24025,7 @@ impl Editor { let range = buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; - project.inline_values(buffer, range, cx) + semantics.inline_values(buffer, range, cx) }) .ok() .flatten()? 
@@ -24097,7 +24155,7 @@ impl Editor { self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); self.colorize_brackets(false, cx); - self.refresh_selected_text_highlights(true, window, cx); + self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); cx.emit(EditorEvent::ExcerptsAdded { buffer: buffer.clone(), predecessor: *predecessor, @@ -24122,6 +24180,11 @@ impl Editor { display_map.clear_lsp_folding_ranges(*buffer_id, cx); }); } + + self.display_map.update(cx, |display_map, cx| { + display_map.unfold_buffers(removed_buffer_ids.iter().copied(), cx); + }); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::ExcerptsRemoved { ids: ids.clone(), @@ -24144,7 +24207,7 @@ impl Editor { self.refresh_document_highlights(cx); let snapshot = multibuffer.read(cx).snapshot(cx); for id in ids { - self.fetched_tree_sitter_chunks.remove(id); + self.bracket_fetched_tree_sitter_chunks.remove(id); if let Some(buffer) = snapshot.buffer_for_excerpt(*id) { self.semantic_token_state .invalidate_buffer(&buffer.remote_id()); @@ -24156,7 +24219,7 @@ impl Editor { } multi_buffer::Event::Reparsed(buffer_id) => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - self.refresh_selected_text_highlights(true, window, cx); + self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); self.colorize_brackets(true, cx); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); @@ -26796,7 +26859,7 @@ pub trait SemanticsProvider { buffer: Entity, refresh: Option, cx: &mut App, - ) -> Shared>>>; + ) -> Option>>>>; fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool; @@ -27278,14 +27341,15 @@ impl CompletionProvider for Entity { } } -impl SemanticsProvider for Entity { +impl SemanticsProvider for WeakEntity { fn hover( &self, buffer: &Entity, position: text::Anchor, cx: &mut App, ) -> 
Option>>> { - Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) + self.update(cx, |project, cx| project.hover(buffer, position, cx)) + .ok() } fn document_highlights( @@ -27294,9 +27358,10 @@ impl SemanticsProvider for Entity { position: text::Anchor, cx: &mut App, ) -> Option>>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { project.document_highlights(buffer, position, cx) - })) + }) + .ok() } fn definitions( @@ -27306,12 +27371,13 @@ impl SemanticsProvider for Entity { kind: GotoDefinitionKind, cx: &mut App, ) -> Option>>>> { - Some(self.update(cx, |project, cx| match kind { + self.update(cx, |project, cx| match kind { GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx), GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx), GotoDefinitionKind::Type => project.type_definitions(buffer, position, cx), GotoDefinitionKind::Implementation => project.implementations(buffer, position, cx), - })) + }) + .ok() } fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { @@ -27327,6 +27393,7 @@ impl SemanticsProvider for Entity { project.any_language_server_supports_inlay_hints(buffer, cx) }) }) + .unwrap_or(false) } fn supports_semantic_tokens(&self, buffer: &Entity, cx: &mut App) -> bool { @@ -27335,6 +27402,7 @@ impl SemanticsProvider for Entity { project.any_language_server_supports_semantic_tokens(buffer, cx) }) }) + .unwrap_or(false) } fn inline_values( @@ -27348,6 +27416,8 @@ impl SemanticsProvider for Entity { Some(project.inline_values(session, active_stack_frame, buffer_handle, range, cx)) }) + .ok() + .flatten() } fn applicable_inlay_chunks( @@ -27356,15 +27426,21 @@ impl SemanticsProvider for Entity { ranges: &[Range], cx: &mut App, ) -> Vec> { - self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.applicable_inlay_chunks(buffer, ranges, cx) + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + 
lsp_store.applicable_inlay_chunks(buffer, ranges, cx) + }) }) + .unwrap_or_default() } fn invalidate_inlay_hints(&self, for_buffers: &HashSet, cx: &mut App) { - self.read(cx).lsp_store().update(cx, |lsp_store, _| { - lsp_store.invalidate_inlay_hints(for_buffers) - }); + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, _| { + lsp_store.invalidate_inlay_hints(for_buffers) + }) + }) + .ok(); } fn inlay_hints( @@ -27375,9 +27451,12 @@ impl SemanticsProvider for Entity { known_chunks: Option<(clock::Global, HashSet>)>, cx: &mut App, ) -> Option, Task>>> { - Some(self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx) - })) + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx) + }) + }) + .ok() } fn semantic_tokens( @@ -27385,10 +27464,13 @@ impl SemanticsProvider for Entity { buffer: Entity, refresh: Option, cx: &mut App, - ) -> Shared>>> { - self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.semantic_tokens(buffer, refresh, cx) + ) -> Option>>>> { + self.update(cx, |this, cx| { + this.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.semantic_tokens(buffer, refresh, cx) + }) }) + .ok() } fn range_for_rename( @@ -27397,7 +27479,7 @@ impl SemanticsProvider for Entity { position: text::Anchor, cx: &mut App, ) -> Option>>>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { let buffer = buffer.clone(); let task = project.prepare_rename(buffer.clone(), position, cx); cx.spawn(async move |_, cx| { @@ -27420,7 +27502,8 @@ impl SemanticsProvider for Entity { } }) }) - })) + }) + .ok() } fn perform_rename( @@ -27430,9 +27513,10 @@ impl SemanticsProvider for Entity { new_name: String, cx: &mut App, ) -> Option>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { project.perform_rename(buffer.clone(), position, 
new_name, cx) - })) + }) + .ok() } } @@ -28532,7 +28616,7 @@ fn edit_prediction_edit_text( } fn edit_prediction_fallback_text(edits: &[(Range, Arc)], cx: &App) -> HighlightedText { - // Fallback for providers that don't provide edit_preview (like Copilot/Supermaven) + // Fallback for providers that don't provide edit_preview (like Copilot) // Just show the raw edit text with basic styling let mut text = String::new(); let mut highlights = Vec::new(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7f5a84ebd326603e1c239bbbb4062b115b17d095..e3d5e698153e39fd4de04893b50a804dc2105b99 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -7,7 +7,7 @@ use crate::{ linked_editing_ranges::LinkedEditingRanges, scroll::scroll_amount::ScrollAmount, test::{ - assert_text_with_selections, build_editor, + assert_text_with_selections, build_editor, editor_content_with_blocks, editor_lsp_test_context::{EditorLspTestContext, git_commit_lang}, editor_test_context::EditorTestContext, select_ranges, @@ -35,9 +35,7 @@ use language_settings::Formatter; use languages::markdown_lang; use languages::rust_lang; use lsp::{CompletionParams, DEFAULT_LSP_REQUEST_TIMEOUT}; -use multi_buffer::{ - ExcerptRange, IndentGuide, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, -}; +use multi_buffer::{IndentGuide, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey}; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_ne}; use project::{ @@ -64,7 +62,6 @@ use util::{ assert_set_eq, path, rel_path::rel_path, test::{TextRangeMarker, marked_text_ranges, marked_text_ranges_by, sample_text}, - uri, }; use workspace::{ CloseActiveItem, CloseAllItems, CloseOtherItems, MultiWorkspace, NavigationEntry, OpenOptions, @@ -3385,6 +3382,46 @@ async fn test_newline_below(cx: &mut TestAppContext) { "}); } +#[gpui::test] +fn test_newline_respects_read_only(cx: &mut TestAppContext) { + init_test(cx, |_| 
{}); + + let editor = cx.add_window(|window, cx| { + let buffer = MultiBuffer::build_simple("aaaa\nbbbb\n", cx); + build_editor(buffer, window, cx) + }); + + _ = editor.update(cx, |editor, window, cx| { + editor.set_read_only(true); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2) + ]) + }); + + editor.newline(&Newline, window, cx); + assert_eq!( + editor.text(cx), + "aaaa\nbbbb\n", + "newline should not modify a read-only editor" + ); + + editor.newline_above(&NewlineAbove, window, cx); + assert_eq!( + editor.text(cx), + "aaaa\nbbbb\n", + "newline_above should not modify a read-only editor" + ); + + editor.newline_below(&NewlineBelow, window, cx); + assert_eq!( + editor.text(cx), + "aaaa\nbbbb\n", + "newline_below should not modify a read-only editor" + ); + }); +} + #[gpui::test] fn test_newline_below_multibuffer(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -3393,14 +3430,18 @@ fn test_newline_below_multibuffer(cx: &mut TestAppContext) { let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); multibuffer @@ -3467,14 +3508,18 @@ fn test_newline_below_multibuffer_middle_of_excerpt(cx: &mut TestAppContext) { let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + 
multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); multibuffer @@ -3528,14 +3573,18 @@ fn test_newline_below_multibuffer_last_line_of_last_excerpt(cx: &mut TestAppCont let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); multibuffer @@ -3589,14 +3638,18 @@ fn test_newline_below_multibuffer_multiple_cursors(cx: &mut TestAppContext) { let buffer_2 = cx.new(|cx| Buffer::local("ddd\neee\nfff", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 3))], + [Point::new(0, 0)..Point::new(2, 3)], + 0, cx, ); multibuffer @@ -4671,14 +4724,18 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) { cx.new(|cx| Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx)); let 
multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), toml_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(2, 0)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), rust_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); multibuffer @@ -4882,6 +4939,32 @@ fn test_join_lines_with_single_selection(cx: &mut TestAppContext) { &[Point::new(0, 3)..Point::new(0, 3)] ); + editor.undo(&Undo, window, cx); + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n\n"); + + // Select a full line, i.e. start of the first line to the start of the second line + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([Point::new(0, 0)..Point::new(1, 0)]) + }); + editor.join_lines(&JoinLines, window, cx); + assert_eq!(buffer.read(cx).text(), "aaa bbb\nccc\nddd\n\n"); + + editor.undo(&Undo, window, cx); + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n\n"); + + // Select two full lines + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([Point::new(0, 0)..Point::new(2, 0)]) + }); + editor.join_lines(&JoinLines, window, cx); + + // Only the selected lines should be joined, not the third. + assert_eq!( + buffer.read(cx).text(), + "aaa bbb\nccc\nddd\n\n", + "only the two selected lines (a and b) should be joined" + ); + // When multiple lines are selected, remove newlines that are spanned by the selection editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([Point::new(0, 5)..Point::new(2, 2)]) @@ -8030,16 +8113,54 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { let mut cx = EditorTestContext::new(cx).await; cx.set_state(indoc! 
{" - « a - bˇ» + « fn main() { + 1 + }ˇ» "}); cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx)); assert_eq!( cx.read_from_clipboard().and_then(|item| item.text()), - Some("a\nb\n".to_string()) + Some("fn main() {\n 1\n}\n".to_string()) ); + + let clipboard_selections: Vec = cx + .read_from_clipboard() + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match entry { + gpui::ClipboardEntry::String(text) => text.metadata_json(), + _ => None, + }) + .expect("should have clipboard selections"); + + assert_eq!(clipboard_selections.len(), 1); + assert!(clipboard_selections[0].is_entire_line); + + cx.set_state(indoc! {" + «fn main() { + 1 + }ˇ» + "}); + cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); + cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx)); + + assert_eq!( + cx.read_from_clipboard().and_then(|item| item.text()), + Some("fn main() {\n 1\n}\n".to_string()) + ); + + let clipboard_selections: Vec = cx + .read_from_clipboard() + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match entry { + gpui::ClipboardEntry::String(text) => text.metadata_json(), + _ => None, + }) + .expect("should have clipboard selections"); + + assert_eq!(clipboard_selections.len(), 1); + assert!(clipboard_selections[0].is_entire_line); } #[gpui::test] @@ -8064,9 +8185,11 @@ async fn test_clipboard_line_numbers_from_multibuffer(cx: &mut TestAppContext) { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer.clone(), - [ExcerptRange::new(Point::new(2, 0)..Point::new(5, 0))], + [Point::new(2, 0)..Point::new(5, 0)], + 0, cx, ); multibuffer @@ -8455,6 +8578,26 @@ async fn test_split_selection_into_lines(cx: &mut TestAppContext) { ); } +#[gpui::test] 
+async fn test_split_selection_into_lines_does_not_scroll(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let large_body = "\nline".repeat(300); + cx.set_state(&format!("«ˇstart{large_body}\nend»")); + let initial_scroll_position = cx.update_editor(|editor, _, cx| editor.scroll_position(cx)); + + cx.update_editor(|editor, window, cx| { + editor.split_selection_into_lines(&Default::default(), window, cx); + }); + + let scroll_position_after_split = cx.update_editor(|editor, _, cx| editor.scroll_position(cx)); + assert_eq!( + initial_scroll_position, scroll_position_after_split, + "Scroll position should not change after splitting selection into lines" + ); +} + #[gpui::test] async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -9581,31 +9724,25 @@ async fn test_select_previous_multibuffer(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = - EditorTestContext::new_multibuffer(cx, ["aaa\n«bbb\nccc\n»ddd", "aaa\n«bbb\nccc\n»ddd"]); + EditorTestContext::new_multibuffer(cx, ["aaa\n«bbb\nccc»\nddd", "aaa\n«bbb\nccc»\nddd"]); cx.assert_editor_state(indoc! {" ˇbbb ccc - bbb - ccc - "}); + ccc"}); cx.dispatch_action(SelectPrevious::default()); cx.assert_editor_state(indoc! {" «bbbˇ» ccc - bbb - ccc - "}); + ccc"}); cx.dispatch_action(SelectPrevious::default()); cx.assert_editor_state(indoc! 
{" «bbbˇ» ccc - «bbbˇ» - ccc - "}); + ccc"}); } #[gpui::test] @@ -10619,7 +10756,9 @@ async fn test_autoindent(cx: &mut TestAppContext) { #[gpui::test] async fn test_autoindent_disabled(cx: &mut TestAppContext) { - init_test(cx, |settings| settings.defaults.auto_indent = Some(false)); + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(settings::AutoIndentMode::None) + }); let language = Arc::new( Language::new( @@ -10697,14 +10836,165 @@ async fn test_autoindent_disabled(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_autoindent_none_does_not_preserve_indentation_on_newline(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(settings::AutoIndentMode::None) + }); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state(indoc! {" + hello + indented lineˇ + world + "}); + + cx.update_editor(|editor, window, cx| { + editor.newline(&Newline, window, cx); + }); + + cx.assert_editor_state(indoc! {" + hello + indented line + ˇ + world + "}); +} + +#[gpui::test] +async fn test_autoindent_preserve_indent_maintains_indentation_on_newline(cx: &mut TestAppContext) { + // When auto_indent is "preserve_indent", pressing Enter on an indented line + // should preserve the indentation but not adjust based on syntax. + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(settings::AutoIndentMode::PreserveIndent) + }); + + let mut cx = EditorTestContext::new(cx).await; + + cx.set_state(indoc! {" + hello + indented lineˇ + world + "}); + + cx.update_editor(|editor, window, cx| { + editor.newline(&Newline, window, cx); + }); + + // The new line SHOULD have the same indentation as the previous line + cx.assert_editor_state(indoc! 
{" + hello + indented line + ˇ + world + "}); +} + +#[gpui::test] +async fn test_autoindent_preserve_indent_does_not_apply_syntax_indent(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(settings::AutoIndentMode::PreserveIndent) + }); + + let language = Arc::new( + Language::new( + LanguageConfig { + brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + surround: false, + newline: false, // Disable extra newline behavior to isolate syntax indent test + }], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query(r#"(_ "{" "}" @end) @indent"#) + .unwrap(), + ); + + let buffer = + cx.new(|cx| Buffer::local("fn foo() {\n}", cx).with_language(language.clone(), cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx)); + editor + .condition::(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) + .await; + + // Position cursor at end of line containing `{` + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(10)..MultiBufferOffset(10)]) // After "fn foo() {" + }); + editor.newline(&Newline, window, cx); + + // With PreserveIndent, the new line should have 0 indentation (same as the fn line) + // NOT 4 spaces (which tree-sitter would add for being inside `{}`) + assert_eq!(editor.text(cx), "fn foo() {\n\n}"); + }); +} + +#[gpui::test] +async fn test_autoindent_syntax_aware_applies_syntax_indent(cx: &mut TestAppContext) { + // Companion test to show that SyntaxAware DOES apply tree-sitter indentation + init_test(cx, |settings| { + settings.defaults.auto_indent = Some(settings::AutoIndentMode::SyntaxAware) + }); + + let language = Arc::new( + Language::new( + LanguageConfig { + 
brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: false, + surround: false, + newline: false, // Disable extra newline behavior to isolate syntax indent test + }], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_indents_query(r#"(_ "{" "}" @end) @indent"#) + .unwrap(), + ); + + let buffer = + cx.new(|cx| Buffer::local("fn foo() {\n}", cx).with_language(language.clone(), cx)); + let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx)); + editor + .condition::(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx)) + .await; + + // Position cursor at end of line containing `{` + editor.update_in(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(10)..MultiBufferOffset(10)]) // After "fn foo() {" + }); + editor.newline(&Newline, window, cx); + + // With SyntaxAware, tree-sitter adds indentation for being inside `{}` + assert_eq!(editor.text(cx), "fn foo() {\n \n}"); + }); +} + #[gpui::test] async fn test_autoindent_disabled_with_nested_language(cx: &mut TestAppContext) { init_test(cx, |settings| { - settings.defaults.auto_indent = Some(true); + settings.defaults.auto_indent = Some(settings::AutoIndentMode::SyntaxAware); settings.languages.0.insert( "python".into(), LanguageSettingsContent { - auto_indent: Some(false), + auto_indent: Some(settings::AutoIndentMode::None), ..Default::default() }, ); @@ -12708,10 +12998,10 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { sample_text_2, "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" ); - let sample_text_3 = sample_text(rows, cols, 'v'); + let sample_text_3 = sample_text(rows, cols, 'v').replace('\u{7f}', "."); assert_eq!( sample_text_3, - 
"vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" + "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n...." ); let fs = FakeFs::new(cx.executor()); @@ -12770,33 +13060,40 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 4), + Point::new(5, 0)..Point::new(6, 4), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 4), + Point::new(5, 0)..Point::new(6, 4), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 4), + Point::new(5, 0)..Point::new(6, 4), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); + assert_eq!(multi_buffer.excerpt_ids().len(), 9); multi_buffer }); let multi_buffer_editor = cx.new_window_entity(|window, cx| { @@ -12810,30 +13107,61 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { }); multi_buffer_editor.update_in(cx, |editor, window, cx| { + let a = editor.text(cx).find("aaaa").unwrap(); editor.change_selections( 
SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(MultiBufferOffset(1)..MultiBufferOffset(2))), + |s| s.select_ranges(Some(MultiBufferOffset(a + 1)..MultiBufferOffset(a + 2))), ); editor.insert("|one|two|three|", window, cx); }); assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx))); multi_buffer_editor.update_in(cx, |editor, window, cx| { + let n = editor.text(cx).find("nnnn").unwrap(); editor.change_selections( SelectionEffects::scroll(Autoscroll::Next), window, cx, - |s| s.select_ranges(Some(MultiBufferOffset(60)..MultiBufferOffset(70))), + |s| s.select_ranges(Some(MultiBufferOffset(n + 4)..MultiBufferOffset(n + 14))), ); editor.insert("|four|five|six|", window, cx); }); assert!(cx.read(|cx| multi_buffer_editor.is_dirty(cx))); // First two buffers should be edited, but not the third one. - assert_eq!( - multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)), - "a|one|two|three|aa\nbbbb\ncccc\n\nffff\ngggg\n\njjjj\nllll\nmmmm\nnnnn|four|five|six|\nr\n\nuuuu\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}", + pretty_assertions::assert_eq!( + editor_content_with_blocks(&multi_buffer_editor, cx), + indoc! 
{" + § main.rs + § ----- + a|one|two|three|aa + bbbb + cccc + § ----- + ffff + gggg + § ----- + jjjj + § other.rs + § ----- + llll + mmmm + nnnn|four|five|six| + § ----- + + § ----- + uuuu + § lib.rs + § ----- + vvvv + wwww + xxxx + § ----- + {{{{ + |||| + § ----- + ...."} ); buffer_1.update(cx, |buffer, _| { assert!(buffer.is_dirty()); @@ -12846,7 +13174,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { assert!(buffer.is_dirty()); assert_eq!( buffer.text(), - "llll\nmmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu", + "llll\nmmmm\nnnnn|four|five|six|\noooo\npppp\n\nssss\ntttt\nuuuu", ) }); buffer_3.update(cx, |buffer, _| { @@ -12872,10 +13200,10 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { let fake_server = fake_servers.next().await.unwrap(); fake_server .server - .on_request::(move |params, _| async move { + .on_request::(move |_params, _| async move { Ok(Some(vec![lsp::TextEdit::new( lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)), - format!("[{} formatted]", params.text_document.uri), + "[formatted]".to_string(), )])) }) .detach(); @@ -12884,23 +13212,61 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { // After multibuffer saving, only first two buffers should be reformatted, but not the third one (as it was not dirty). assert!(cx.read(|cx| !multi_buffer_editor.is_dirty(cx))); assert_eq!( - multi_buffer_editor.update(cx, |editor, cx| editor.text(cx)), - uri!( - "a|o[file:///a/main.rs formatted]bbbb\ncccc\n\nffff\ngggg\n\njjjj\n\nlll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|\nr\n\nuuuu\n\nvvvv\nwwww\nxxxx\n\n{{{{\n||||\n\n\u{7f}\u{7f}\u{7f}\u{7f}" - ), + editor_content_with_blocks(&multi_buffer_editor, cx), + indoc! 
{" + § main.rs + § ----- + a|o[formatted]bbbb + cccc + § ----- + ffff + gggg + § ----- + jjjj + + § other.rs + § ----- + lll[formatted]mmmm + nnnn|four|five|six| + § ----- + + § ----- + uuuu + + § lib.rs + § ----- + vvvv + wwww + xxxx + § ----- + {{{{ + |||| + § ----- + ...."} ); buffer_1.update(cx, |buffer, _| { assert!(!buffer.is_dirty()); assert_eq!( buffer.text(), - uri!("a|o[file:///a/main.rs formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n"), + "a|o[formatted]bbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n", ) }); + // Diff < left / right > : + // lll[formatted]mmmm + // nnnn|four|five|six|oooo + // pppp + // < + // ssss + // tttt + // uuuu + buffer_2.update(cx, |buffer, _| { assert!(!buffer.is_dirty()); assert_eq!( buffer.text(), - uri!("lll[file:///a/other.rs formatted]mmmm\nnnnn|four|five|six|oooo\npppp\nr\nssss\ntttt\nuuuu\n"), + "lll[formatted]mmmm\nnnnn|four|five|six|\noooo\npppp\n\nssss\ntttt\nuuuu\n", ) }); buffer_3.update(cx, |buffer, _| { @@ -12957,19 +13323,25 @@ async fn test_autosave_with_dirty_buffers(cx: &mut TestAppContext) { // Create a multi-buffer with all three buffers let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); multi_buffer @@ -15456,7 +15828,9 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte 
10.satu; // - // separate cursors so they open in different excerpts (manually reproducible) + // separate1 + // separate2 + // separate3 // 10.satu20; @@ -15468,8 +15842,6 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte // - // - 10.satuˇ20; } "}; @@ -15479,15 +15851,10 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte // - // - 10.saturating_sub()ˇ; } "}; - let first_excerpt_end = buffer_text.find("//").unwrap() + 3; - let second_excerpt_end = buffer_text.rfind("//").unwrap() - 4; - let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/a"), @@ -15527,14 +15894,14 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); - multi_buffer.push_excerpts( - buffer.clone(), - [ExcerptRange::new(0..first_excerpt_end)], - cx, - ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer.clone(), - [ExcerptRange::new(second_excerpt_end..buffer_text.len())], + [ + Point::zero()..Point::new(2, 0), + Point::new(7, 0)..buffer.read(cx).max_point(), + ], + 0, cx, ); multi_buffer @@ -15568,7 +15935,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([ Point::new(1, 11)..Point::new(1, 11), - Point::new(7, 11)..Point::new(7, 11), + Point::new(5, 11)..Point::new(5, 11), ]) }); @@ -15587,12 +15954,12 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte lsp::InsertReplaceEdit { new_text: "saturating_sub()".to_owned(), insert: lsp::Range::new( - lsp::Position::new(7, 7), - lsp::Position::new(7, 11), + lsp::Position::new(9, 7), + lsp::Position::new(9, 11), ), replace: lsp::Range::new( - lsp::Position::new(7, 7), - lsp::Position::new(7, 13), + lsp::Position::new(9, 7), + lsp::Position::new(9, 13), ), 
}, )), @@ -17130,6 +17497,7 @@ async fn test_no_duplicated_completion_requests(cx: &mut TestAppContext) { } }); + cx.executor().run_until_parked(); cx.condition(|editor, _| editor.context_menu_visible()) .await; cx.assert_editor_state("fn main() { let a = 2.ˇ; }"); @@ -17674,24 +18042,26 @@ async fn test_toggle_block_comment(cx: &mut TestAppContext) { fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let buffer = cx.new(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); + let buffer = cx.new(|cx| Buffer::local(sample_text(6, 4, 'a'), cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(0, 4)), - ExcerptRange::new(Point::new(1, 0)..Point::new(1, 4)), + Point::new(0, 0)..Point::new(0, 4), + Point::new(5, 0)..Point::new(5, 4), ], + 0, cx, ); - assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb"); + assert_eq!(multibuffer.read(cx).text(), "aaaa\nffff"); multibuffer }); let (editor, cx) = cx.add_window_view(|window, cx| build_editor(multibuffer, window, cx)); editor.update_in(cx, |editor, window, cx| { - assert_eq!(editor.text(cx), "aaaa\nbbbb"); + assert_eq!(editor.text(cx), "aaaa\nffff"); editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges([ Point::new(0, 0)..Point::new(0, 0), @@ -17700,7 +18070,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { }); editor.handle_input("X", window, cx); - assert_eq!(editor.text(cx), "Xaaaa\nXbbbb"); + assert_eq!(editor.text(cx), "Xaaaa\nXffff"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), [ @@ -17714,7 +18084,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { s.select_ranges([Point::new(0, 2)..Point::new(1, 2)]) }); editor.backspace(&Default::default(), window, cx); - assert_eq!(editor.text(cx), "Xa\nbbb"); + 
assert_eq!(editor.text(cx), "Xa\nfff"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(1, 0)..Point::new(1, 0)] @@ -17724,7 +18094,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { s.select_ranges([Point::new(1, 1)..Point::new(0, 1)]) }); editor.backspace(&Default::default(), window, cx); - assert_eq!(editor.text(cx), "X\nbb"); + assert_eq!(editor.text(cx), "X\nff"); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), [Point::new(0, 1)..Point::new(0, 1)] @@ -17732,115 +18102,23 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { }); } -#[gpui::test] -fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let markers = vec![('[', ']').into(), ('(', ')').into()]; - let (initial_text, mut excerpt_ranges) = marked_text_ranges_by( - indoc! {" - [aaaa - (bbbb] - cccc)", - }, - markers.clone(), - ); - let excerpt_ranges = markers.into_iter().map(|marker| { - let context = excerpt_ranges.remove(&marker).unwrap()[0].clone(); - ExcerptRange::new(context) - }); - let buffer = cx.new(|cx| Buffer::local(initial_text, cx)); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts(buffer, excerpt_ranges, cx); - multibuffer - }); - - let (editor, cx) = cx.add_window_view(|window, cx| build_editor(multibuffer, window, cx)); - editor.update_in(cx, |editor, window, cx| { - let (expected_text, selection_ranges) = marked_text_ranges( - indoc! {" - aaaa - bˇbbb - bˇbbˇb - cccc" - }, - true, - ); - assert_eq!(editor.text(cx), expected_text); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges( - selection_ranges - .iter() - .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)), - ) - }); - - editor.handle_input("X", window, cx); - - let (expected_text, expected_selections) = marked_text_ranges( - indoc! 
{" - aaaa - bXˇbbXb - bXˇbbXˇb - cccc" - }, - false, - ); - assert_eq!(editor.text(cx), expected_text); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - expected_selections - .iter() - .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) - .collect::>() - ); - - editor.newline(&Newline, window, cx); - let (expected_text, expected_selections) = marked_text_ranges( - indoc! {" - aaaa - bX - ˇbbX - b - bX - ˇbbX - ˇb - cccc" - }, - false, - ); - assert_eq!(editor.text(cx), expected_text); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - expected_selections - .iter() - .map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end)) - .collect::>() - ); - }); -} - #[gpui::test] fn test_refresh_selections(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let buffer = cx.new(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); - let mut excerpt1_id = None; + let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - excerpt1_id = multibuffer - .push_excerpts( - buffer.clone(), - [ - ExcerptRange::new(Point::new(0, 0)..Point::new(1, 4)), - ExcerptRange::new(Point::new(1, 0)..Point::new(2, 4)), - ], - cx, - ) - .into_iter() - .next(); - assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\nbbbb\ncccc"); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(1, 4), + Point::new(3, 0)..Point::new(4, 4), + ], + 0, + cx, + ); multibuffer }); @@ -17880,7 +18158,13 @@ fn test_refresh_selections(cx: &mut TestAppContext) { }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [Point::new(3, 0)..Point::new(4, 4)], + 0, + cx, + ); }); _ = editor.update(cx, |editor, window, cx| { // Removing an excerpt causes the first selection 
to become degenerate. @@ -17898,8 +18182,8 @@ fn test_refresh_selections(cx: &mut TestAppContext) { assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), [ + Point::new(0, 0)..Point::new(0, 0), Point::new(0, 1)..Point::new(0, 1), - Point::new(0, 3)..Point::new(0, 3) ] ); assert!(editor.selections.pending_anchor().is_some()); @@ -17910,22 +18194,20 @@ fn test_refresh_selections(cx: &mut TestAppContext) { fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let buffer = cx.new(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); - let mut excerpt1_id = None; + let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - excerpt1_id = multibuffer - .push_excerpts( - buffer.clone(), - [ - ExcerptRange::new(Point::new(0, 0)..Point::new(1, 4)), - ExcerptRange::new(Point::new(1, 0)..Point::new(2, 4)), - ], - cx, - ) - .into_iter() - .next(); - assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\nbbbb\ncccc"); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(1, 4), + Point::new(3, 0)..Point::new(4, 4), + ], + 0, + cx, + ); + assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\ndddd\neeee"); multibuffer }); @@ -17947,7 +18229,13 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [Point::new(3, 0)..Point::new(4, 4)], + 0, + cx, + ); }); _ = editor.update(cx, |editor, window, cx| { assert_eq!( @@ -17959,7 +18247,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); assert_eq!( editor.selections.ranges(&editor.display_snapshot(cx)), 
- [Point::new(0, 3)..Point::new(0, 3)] + [Point::new(0, 0)..Point::new(0, 0)] ); assert!(editor.selections.pending_anchor().is_some()); }); @@ -18476,9 +18764,10 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { // Remove some excerpts. leader.update(cx, |leader, cx| { leader.buffer.update(cx, |multibuffer, cx| { - let excerpt_ids = multibuffer.excerpt_ids(); - multibuffer.remove_excerpts([excerpt_ids[1], excerpt_ids[2]], cx); - multibuffer.remove_excerpts([excerpt_ids[0]], cx); + multibuffer.remove_excerpts_for_path( + PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()), + cx, + ); }); }); @@ -20709,31 +20998,37 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 
0)..Point::new(9, 4), ], + 0, cx, ); multibuffer @@ -20760,7 +21055,7 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { cx.executor().run_until_parked(); editor.update_in(cx, |editor, window, cx| { - assert_eq!(editor.text(cx), "Xaaa\nXbbb\nXccc\n\nXfff\nXggg\n\nXjjj\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}"); + assert_eq!(editor.display_text(cx), "\n\nXaaa\nXbbb\nXccc\n\nXfff\nXggg\n\nXjjj\n\n\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\n\n\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}"); editor.select_all(&SelectAll, window, cx); editor.git_restore(&Default::default(), window, cx); }); @@ -20768,7 +21063,7 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { // When all ranges are selected, all buffer hunks are reverted. editor.update(cx, |editor, cx| { - assert_eq!(editor.text(cx), "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nllll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu\n\n\nvvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}\n\n"); + assert_eq!(editor.display_text(cx), "\n\naaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\n\n\n\n\nllll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu\n\n\n\n\n\n\nvvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}\n\n\n\n"); }); buffer_1.update(cx, |buffer, _| { assert_eq!(buffer.text(), base_text_1); @@ -20786,7 +21081,7 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { editor.update_in(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(Some(Point::new(0, 0)..Point::new(6, 0))); + s.select_ranges(Some(Point::new(0, 0)..Point::new(5, 0))); }); editor.git_restore(&Default::default(), window, cx); }); @@ -20795,8 +21090,8 @@ async fn test_multibuffer_reverts(cx: &mut TestAppContext) { // but not affect buffer_2 and its related excerpts. 
editor.update(cx, |editor, cx| { assert_eq!( - editor.text(cx), - "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}" + editor.display_text(cx), + "\n\naaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\n\n\n\n\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\n\n\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}" ); }); buffer_1.update(cx, |buffer, _| { @@ -20851,31 +21146,37 @@ async fn test_multibuffer_in_navigation_history(cx: &mut TestAppContext) { let multi_buffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(9, 4), ], + 0, cx, ); multibuffer @@ -21330,33 +21631,40 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { 
let multi_buffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 3)), + Point::new(0, 0)..Point::new(2, 3), + Point::new(5, 0)..Point::new(6, 3), + Point::new(9, 0)..Point::new(10, 3), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 3)), + Point::new(0, 0)..Point::new(2, 3), + Point::new(5, 0)..Point::new(6, 3), + Point::new(9, 0)..Point::new(10, 3), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 3)), + Point::new(0, 0)..Point::new(2, 3), + Point::new(5, 0)..Point::new(6, 3), + Point::new(9, 0)..Point::new(10, 3), ], + 0, cx, ); + assert_eq!(multibuffer.excerpt_ids().len(), 9); multibuffer }); @@ -21387,26 +21695,20 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { ˇaaa ccc ddd - ggg hhh - lll mmm NNN - qqq rrr - uuu 111 222 333 - 666 777 - 000 !!!" 
.unindent(), @@ -21424,27 +21726,21 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { - bbb ccc ddd - ggg hhh - lll mmm - nnn + NNN - qqq rrr - uuu 111 222 333 - + 666 777 - 000 !!!ˇ»" .unindent(), @@ -21461,15 +21757,18 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) { let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx)); let multi_buffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0)), - ExcerptRange::new(Point::new(4, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 0)), + Point::new(0, 0)..Point::new(1, 3), + Point::new(4, 0)..Point::new(6, 3), + Point::new(9, 0)..Point::new(9, 3), ], + 0, cx, ); + assert_eq!(multibuffer.excerpt_ids().len(), 3); multibuffer }); @@ -21502,15 +21801,12 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) { ˇaaa - bbb + BBB - - ddd - eee + DDD + EEE fff - - iii - " + iii" .unindent(), ); } @@ -24100,31 +24396,37 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(10, 4), ], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 
4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(10, 4), ], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0)), - ExcerptRange::new(Point::new(5, 0)..Point::new(7, 0)), - ExcerptRange::new(Point::new(9, 0)..Point::new(10, 4)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(5, 0)..Point::new(6, 0), + Point::new(9, 0)..Point::new(10, 4), ], + 0, cx, ); multi_buffer @@ -24141,7 +24443,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\naaaa\nbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", + "\n\naaaa\nbbbb\ncccc\n\nffff\ngggg\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\nqqqq\nrrrr\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n1111\n2222\n\n5555", ); multi_buffer_editor.update(cx, |editor, cx| { @@ -24149,7 +24451,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { }); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", + "\n\n\n\nllll\nmmmm\nnnnn\n\nqqqq\nrrrr\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n1111\n2222\n\n5555", "After folding the first buffer, its text should not be displayed" ); @@ -24158,7 +24460,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { }); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\n\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", + "\n\n\n\n\n\nvvvv\nwwww\nxxxx\n\n1111\n2222\n\n5555", "After folding the second buffer, its text should not be displayed" ); @@ -24183,7 +24485,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { }); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| 
editor.display_text(cx)), - "\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n", + "\n\n\n\nllll\nmmmm\nnnnn\n\nqqqq\nrrrr\n\nuuuu\n\n", "After unfolding the second buffer, its text should be displayed" ); @@ -24205,7 +24507,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n", + "\n\naaaa\nBbbbb\ncccc\n\nffff\ngggg\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\nqqqq\nrrrr\n\nuuuu\n\n", "After unfolding the first buffer, its and 2nd buffer's text should be displayed" ); @@ -24214,11 +24516,82 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { }); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", + "\n\naaaa\nBbbbb\ncccc\n\nffff\ngggg\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\nqqqq\nrrrr\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n1111\n2222\n\n5555", "After unfolding the all buffers, all original text should be displayed" ); } +#[gpui::test] +async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "file_a.txt": "File A\nFile A\nFile A", + "file_b.txt": "File B\nFile B\nFile B", + }), + ) + .await; + + let project = Project::test(fs, [path!("/root").as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*window, cx); + let worktree = project.update(cx, |project, cx| { + let mut worktrees = project.worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + worktrees.pop().unwrap() + }); + let worktree_id = worktree.update(cx, |worktree, _| worktree.id()); + + let 
buffer_a = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("file_a.txt")), cx) + }) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("file_b.txt")), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(ReadWrite); + let range_a = Point::new(0, 0)..Point::new(2, 4); + let range_b = Point::new(0, 0)..Point::new(2, 4); + + multi_buffer.set_excerpts_for_path(PathKey::sorted(0), buffer_a.clone(), [range_a], 0, cx); + multi_buffer.set_excerpts_for_path(PathKey::sorted(1), buffer_b.clone(), [range_b], 0, cx); + multi_buffer + }); + + let editor = cx.new_window_entity(|window, cx| { + Editor::new( + EditorMode::full(), + multi_buffer.clone(), + Some(project.clone()), + window, + cx, + ) + }); + + editor.update(cx, |editor, cx| { + editor.fold_buffer(buffer_a.read(cx).remote_id(), cx); + }); + assert!(editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); + + // When the excerpts for `buffer_a` are removed, a + // `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be + // picked up by the editor and update the display map accordingly. 
+ multi_buffer.update(cx, |multi_buffer, cx| { + multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx) + }); + assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); +} + #[gpui::test] async fn test_folding_buffers_with_one_excerpt(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -24268,19 +24641,25 @@ async fn test_folding_buffers_with_one_excerpt(cx: &mut TestAppContext) { let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0))], + [Point::new(0, 0)..Point::new(3, 0)], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0))], + [Point::new(0, 0)..Point::new(3, 0)], + 0, cx, ); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0))], + [Point::new(0, 0)..Point::new(3, 0)], + 0, cx, ); multi_buffer @@ -24391,15 +24770,15 @@ async fn test_folding_buffer_when_multibuffer_has_only_one_excerpt(cx: &mut Test let multi_buffer = cx.new(|cx| { let mut multi_buffer = MultiBuffer::new(ReadWrite); - multi_buffer.push_excerpts( + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new( - Point::new(0, 0) - ..Point::new( - sample_text.chars().filter(|&c| c == '\n').count() as u32 + 1, - 0, - ), - )], + [Point::new(0, 0) + ..Point::new( + sample_text.chars().filter(|&c| c == '\n').count() as u32 + 1, + 0, + )], + 0, cx, ); multi_buffer @@ -26528,6 +26907,48 @@ async fn test_linked_edits_on_typing_punctuation(cx: &mut TestAppContext) { cx.assert_editor_state(""); } +#[gpui::test] +async fn test_linked_edits_on_typing_dot_without_language_override(cx: &mut TestAppContext) { + init_test(cx, 
|_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new(Language::new( + LanguageConfig { + name: "HTML".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["html".to_string()], + ..LanguageMatcher::default() + }, + brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "<".into(), + end: ">".into(), + close: true, + ..Default::default() + }], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_html::LANGUAGE.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + cx.set_state(""); + cx.update_editor(|editor, _, cx| { + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 6)), + (Point::new(0, 9), Point::new(0, 14)), + editor, + cx, + ); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(".", window, cx); + }); + cx.assert_editor_state(""); +} + #[gpui::test] async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -29645,19 +30066,25 @@ fn test_relative_line_numbers(cx: &mut TestAppContext) { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(2, 0)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(2, 0)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(2, 0)], + 0, cx, ); multibuffer diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a04e16683d92f8e79cbe75d6dc03764276ede226..159aee456a6894824ff8e3e212281074498df3c6 100644 --- 
a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1,15 +1,15 @@ use crate::{ - ActiveDiagnostic, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement, - CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, ConflictsOuter, - ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId, - DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, Editor, EditorMode, - EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, - GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason, - JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE, - MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator, - PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, SelectPhase, Selection, - SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, - ToggleFold, ToggleFoldAll, + ActiveDiagnostic, BUFFER_HEADER_PADDING, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, + ChunkReplacement, CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, + ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, + CustomBlockId, DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, + Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, + FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, + InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, + MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, + PhantomBreakpointIndicator, PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, + SelectPhase, Selection, SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, + StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, 
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP}, column_pixels, display_map::{ @@ -47,8 +47,8 @@ use gpui::{ MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement, Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString, Size, StatefulInteractiveElement, Style, Styled, StyledText, TextAlign, TextRun, - TextStyleRefinement, WeakEntity, Window, anchored, checkerboard, deferred, div, fill, - linear_color_stop, linear_gradient, outline, point, px, quad, relative, size, solid_background, + TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop, + linear_gradient, outline, pattern_slash, point, px, quad, relative, size, solid_background, transparent_black, }; use itertools::Itertools; @@ -186,7 +186,10 @@ impl SelectionLayout { #[derive(Default)] struct RenderBlocksOutput { - blocks: Vec, + // We store spacer blocks separately because they paint in a different order + // (spacers -> indent guides -> non-spacers) + non_spacer_blocks: Vec, + spacer_blocks: Vec, row_block_types: HashMap, resized_blocks: Option>, } @@ -2860,7 +2863,7 @@ impl EditorElement { } }); - window.defer_draw(element, origin, 2); + window.defer_draw(element, origin, 2, None); } } @@ -2970,11 +2973,12 @@ impl EditorElement { - scroll_pixel_position.x, ); if start_x >= text_origin.x { - let (offset_y, length) = Self::calculate_indent_guide_bounds( - indent_guide.start_row..indent_guide.end_row, - line_height, - snapshot, - ); + let (offset_y, length, display_row_range) = + Self::calculate_indent_guide_bounds( + indent_guide.start_row..indent_guide.end_row, + line_height, + snapshot, + ); let start_y = Pixels::from( ScrollOffset::from(content_origin.y) + offset_y @@ -2985,6 +2989,7 @@ impl EditorElement { origin: point(start_x, start_y), length, single_indent_width, + display_row_range, depth: indent_guide.depth, active: 
active_indent_guide_indices.contains(&i), settings: indent_guide.settings, @@ -2997,6 +3002,22 @@ impl EditorElement { ) } + fn depth_zero_indent_guide_padding_for_row( + indent_guides: &[IndentGuideLayout], + row: DisplayRow, + ) -> Pixels { + indent_guides + .iter() + .find(|guide| guide.depth == 0 && guide.display_row_range.contains(&row)) + .and_then(|guide| { + guide + .settings + .visible_line_width(guide.active) + .map(|width| px(width as f32 * 2.0)) + }) + .unwrap_or(px(0.0)) + } + fn layout_wrap_guides( &self, em_advance: Pixels, @@ -3034,11 +3055,11 @@ impl EditorElement { row_range: Range, line_height: Pixels, snapshot: &DisplaySnapshot, - ) -> (f64, gpui::Pixels) { + ) -> (f64, gpui::Pixels, Range) { let start_point = Point::new(row_range.start.0, 0); let end_point = Point::new(row_range.end.0, 0); - let row_range = start_point.to_display_point(snapshot).row() + let mut row_range = start_point.to_display_point(snapshot).row() ..end_point.to_display_point(snapshot).row(); let mut prev_line = start_point; @@ -3076,6 +3097,7 @@ impl EditorElement { if !found_excerpt_header { offset_y -= block_offset as f64 * f64::from(line_height); length += block_height as f32 * line_height; + row_range = DisplayRow(row_range.start.0.saturating_sub(block_offset))..row_range.end; } // If there is a block (e.g. 
diagnostic) at the end of an multibuffer excerpt, @@ -3093,9 +3115,11 @@ impl EditorElement { } if found_excerpt_header { length -= block_height as f32 * line_height; + } else { + row_range = row_range.start..cons_line; } - (offset_y, length) + (offset_y, length, row_range) } fn layout_breakpoints( @@ -3857,6 +3881,7 @@ impl EditorElement { latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, sticky_header_excerpt_id: Option, + indent_guides: &Option>, block_resize_offset: &mut i32, window: &mut Window, cx: &mut App, @@ -3908,19 +3933,30 @@ impl EditorElement { div() .size_full() - .child(custom.render(&mut BlockContext { - window, - app: cx, - anchor_x, - margins: editor_margins, - line_height, - em_width, - block_id, - height: custom.height.unwrap_or(1), - selected, - max_width: text_hitbox.size.width.max(*scroll_width), - editor_style: &self.style, - })) + .child( + custom.render(&mut BlockContext { + window, + app: cx, + anchor_x, + margins: editor_margins, + line_height, + em_width, + block_id, + height: custom.height.unwrap_or(1), + selected, + max_width: text_hitbox.size.width.max(*scroll_width), + editor_style: &self.style, + indent_guide_padding: indent_guides + .as_ref() + .map(|guides| { + Self::depth_zero_indent_guide_padding_for_row( + guides, + block_row_start, + ) + }) + .unwrap_or(px(0.0)), + }), + ) .into_any() } @@ -4008,7 +4044,20 @@ impl EditorElement { } Block::Spacer { height, .. 
} => { - Self::render_spacer_block(block_id, *height, line_height, window, cx) + let indent_guide_padding = indent_guides + .as_ref() + .map(|guides| { + Self::depth_zero_indent_guide_padding_for_row(guides, block_row_start) + }) + .unwrap_or(px(0.0)); + Self::render_spacer_block( + block_id, + *height, + line_height, + indent_guide_padding, + window, + cx, + ) } }; @@ -4070,10 +4119,13 @@ impl EditorElement { Some((element, final_size, row, x_offset)) } - /// The checkerboard pattern height must be an even factor of the line - /// height, so that two consecutive spacer blocks can render contiguously - /// without an obvious break in the pattern. - fn checkerboard_size(line_height: f32, target_height: f32) -> f32 { + /// The spacer pattern period must be an even factor of the line height, so + /// that two consecutive spacer blocks can render contiguously without an + /// obvious break in the pattern. + /// + /// Two consecutive spacers can appear when the other side has a diff hunk + /// and a custom block next to each other (e.g. merge conflict buttons). 
+ fn spacer_pattern_period(line_height: f32, target_height: f32) -> f32 { let k_approx = line_height / (2.0 * target_height); let k_floor = (k_approx.floor() as u32).max(1); let k_ceil = (k_approx.ceil() as u32).max(1); @@ -4092,24 +4144,40 @@ impl EditorElement { block_id: BlockId, block_height: u32, line_height: Pixels, + indent_guide_padding: Pixels, window: &mut Window, cx: &App, ) -> AnyElement { + let target_size = 16.0; + let scale = window.scale_factor(); + let pattern_size = + Self::spacer_pattern_period(f32::from(line_height) * scale, target_size * scale); + let color = cx.theme().colors().panel_background; + let background = pattern_slash(color, 2.0, pattern_size - 2.0); + div() .id(block_id) + .cursor(CursorStyle::Arrow) .w_full() .h((block_height as f32) * line_height) - // the checkerboard pattern is semi-transparent, so we render a - // solid background to prevent indent guides peeking through - .bg(cx.theme().colors().editor_background) + .flex() + .flex_row() + .child(div().flex_shrink_0().w(indent_guide_padding).h_full()) .child( div() - .size_full() - .bg(checkerboard(cx.theme().colors().panel_background, { - let target_size = 16.0; - let scale = window.scale_factor(); - Self::checkerboard_size(f32::from(line_height) * scale, target_size * scale) - })), + .flex_1() + .h_full() + .relative() + .overflow_x_hidden() + .child( + div() + .absolute() + .top_0() + .bottom_0() + .right_0() + .left(-indent_guide_padding) + .bg(background), + ), ) .into_any() } @@ -4154,6 +4222,7 @@ impl EditorElement { latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, sticky_header_excerpt_id: Option, + indent_guides: &Option>, window: &mut Window, cx: &mut App, ) -> RenderBlocksOutput { @@ -4166,6 +4235,7 @@ impl EditorElement { .update(cx, |editor, _| editor.take_focused_block()); let mut fixed_block_max_width = Pixels::ZERO; let mut blocks = Vec::new(); + let mut spacer_blocks = Vec::new(); let mut resized_blocks = 
HashMap::default(); let mut row_block_types = HashMap::default(); let mut block_resize_offset: i32 = 0; @@ -4199,6 +4269,7 @@ impl EditorElement { latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + indent_guides, &mut block_resize_offset, window, cx, @@ -4226,7 +4297,15 @@ impl EditorElement { .size .width .max(fixed_block_max_width) - .max(editor_margins.gutter.width + *scroll_width) + .max( + editor_margins.gutter.width + *scroll_width + editor_margins.extended_right, + ) + .into(), + (BlockStyle::Spacer, _) => hitbox + .size + .width + .max(fixed_block_max_width) + .max(*scroll_width + editor_margins.extended_right) .into(), (BlockStyle::Fixed, _) => unreachable!(), }; @@ -4258,20 +4337,26 @@ impl EditorElement { latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + indent_guides, &mut block_resize_offset, window, cx, ) { - blocks.push(BlockLayout { + let layout = BlockLayout { id: block_id, x_offset, row: Some(row), element, available_space: size(width, element_size.height.into()), style, - overlaps_gutter: !block.place_near(), + overlaps_gutter: !block.place_near() && style != BlockStyle::Spacer, is_buffer_header: block.is_buffer_header(), - }); + }; + if style == BlockStyle::Spacer { + spacer_blocks.push(layout); + } else { + blocks.push(layout); + } } } @@ -4283,12 +4368,17 @@ impl EditorElement { let style = block.style(); let width = match style { BlockStyle::Fixed => AvailableSpace::MinContent, - BlockStyle::Flex => AvailableSpace::Definite( + BlockStyle::Flex => { + AvailableSpace::Definite(hitbox.size.width.max(fixed_block_max_width).max( + editor_margins.gutter.width + *scroll_width + editor_margins.extended_right, + )) + } + BlockStyle::Spacer => AvailableSpace::Definite( hitbox .size .width .max(fixed_block_max_width) - .max(editor_margins.gutter.width + *scroll_width), + .max(*scroll_width + editor_margins.extended_right), ), BlockStyle::Sticky => AvailableSpace::Definite(hitbox.size.width), }; @@ -4315,6 
+4405,7 @@ impl EditorElement { latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + indent_guides, &mut block_resize_offset, window, cx, @@ -4338,7 +4429,8 @@ impl EditorElement { } RenderBlocksOutput { - blocks, + non_spacer_blocks: blocks, + spacer_blocks, row_block_types, resized_blocks: (!resized_blocks.is_empty()).then_some(resized_blocks), } @@ -4348,9 +4440,11 @@ impl EditorElement { &self, blocks: &mut Vec, hitbox: &Hitbox, + gutter_hitbox: &Hitbox, line_height: Pixels, scroll_position: gpui::Point, scroll_pixel_position: gpui::Point, + editor_margins: &EditorMargins, window: &mut Window, cx: &mut App, ) { @@ -4369,6 +4463,13 @@ impl EditorElement { hitbox.origin + point(Pixels::ZERO, hitbox.size.height) }; + if block.style == BlockStyle::Spacer { + origin += point( + gutter_hitbox.size.width + editor_margins.gutter.margin, + Pixels::ZERO, + ); + } + if !matches!(block.style, BlockStyle::Sticky) { origin += point(Pixels::from(-scroll_pixel_position.x), Pixels::ZERO); } @@ -5008,7 +5109,7 @@ impl EditorElement { current_position.y -= size.height; } let position = current_position; - window.defer_draw(element, current_position, 1); + window.defer_draw(element, current_position, 1, None); if !y_flipped { current_position.y += size.height + MENU_GAP; } else { @@ -5111,7 +5212,7 @@ impl EditorElement { // Skip drawing if it doesn't fit anywhere. 
if let Some((aside, position, size)) = positioned_aside { let aside_bounds = Bounds::new(position, size); - window.defer_draw(aside, position, 2); + window.defer_draw(aside, position, 2, None); return Some(aside_bounds); } @@ -5320,7 +5421,7 @@ impl EditorElement { .on_mouse_move(|_, _, cx| cx.stop_propagation()) .into_any_element(); occlusion.layout_as_root(size(width, HOVER_POPOVER_GAP).into(), window, cx); - window.defer_draw(occlusion, origin, 2); + window.defer_draw(occlusion, origin, 2, None); } fn place_popovers_above( @@ -5337,7 +5438,7 @@ impl EditorElement { current_y - size.height, ); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = point(popover_origin.x, popover_origin.y - HOVER_POPOVER_GAP); draw_occluder(size.width, origin, window, cx); @@ -5359,7 +5460,7 @@ impl EditorElement { let size = popover.size; let popover_origin = point(hovered_point.x + popover.horizontal_offset, current_y); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = point(popover_origin.x, popover_origin.y + size.height); draw_occluder(size.width, origin, window, cx); @@ -5461,7 +5562,7 @@ impl EditorElement { let size = popover.size; let popover_origin = point(origin.x, current_y); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = point(popover_origin.x, popover_origin.y + size.height); draw_occluder(size.width, origin, window, cx); @@ -5580,7 +5681,11 @@ impl EditorElement { continue; } let row_ix = display_row_range.start.0.saturating_sub(row_range.start.0); - if row_infos[row_ix as usize].diff_status.is_none() { + if row_infos + .get(row_ix as usize) + .and_then(|row_info| row_info.diff_status) + 
.is_none() + { continue; } if highlighted_rows @@ -5789,7 +5894,7 @@ impl EditorElement { }) }; - window.defer_draw(element, final_origin, 2); + window.defer_draw(element, final_origin, 2, None); } fn paint_background(&self, layout: &EditorLayout, window: &mut Window, cx: &mut App) { @@ -6019,22 +6124,18 @@ impl EditorElement { )), }; - let requested_line_width = if indent_guide.active { - settings.active_line_width - } else { - settings.line_width - } - .clamp(1, 10); let mut line_indicator_width = 0.; - if let Some(color) = line_color { - window.paint_quad(fill( - Bounds { - origin: indent_guide.origin, - size: size(px(requested_line_width as f32), indent_guide.length), - }, - color, - )); - line_indicator_width = requested_line_width as f32; + if let Some(requested_line_width) = settings.visible_line_width(indent_guide.active) { + if let Some(color) = line_color { + window.paint_quad(fill( + Bounds { + origin: indent_guide.origin, + size: size(px(requested_line_width as f32), indent_guide.length), + }, + color, + )); + line_indicator_width = requested_line_width as f32; + } } if let Some(color) = background_color { @@ -7464,7 +7565,27 @@ impl EditorElement { } } - fn paint_blocks(&mut self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) { + fn paint_spacer_blocks( + &mut self, + layout: &mut EditorLayout, + window: &mut Window, + cx: &mut App, + ) { + for mut block in layout.spacer_blocks.drain(..) { + let mut bounds = layout.hitbox.bounds; + bounds.origin.x += layout.gutter_hitbox.bounds.size.width; + window.with_content_mask(Some(ContentMask { bounds }), |window| { + block.element.paint(window, cx); + }) + } + } + + fn paint_non_spacer_blocks( + &mut self, + layout: &mut EditorLayout, + window: &mut Window, + cx: &mut App, + ) { for mut block in layout.blocks.drain(..) 
{ if block.overlaps_gutter { block.element.paint(window, cx); @@ -7950,7 +8071,7 @@ fn apply_dirty_filename_style( text_style: &gpui::TextStyle, cx: &App, ) -> Option { - let text = segment.text.replace('\n', "⏎"); + let text = segment.text.replace('\n', " "); let filename_position = std::path::Path::new(&segment.text) .file_name() @@ -8134,7 +8255,7 @@ pub(crate) fn render_buffer_header( let header = div() .id(("buffer-header", for_excerpt.buffer_id.to_proto())) - .p_1() + .p(BUFFER_HEADER_PADDING) .w_full() .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) .child( @@ -9587,11 +9708,12 @@ impl Element for EditorElement { let right_margin = minimap_width + vertical_scrollbar_width; - let editor_width = - text_width - gutter_dimensions.margin - 2 * em_width - right_margin; + let extended_right = 2 * em_width + right_margin; + let editor_width = text_width - gutter_dimensions.margin - extended_right; let editor_margins = EditorMargins { gutter: gutter_dimensions, right: right_margin, + extended_right, }; snapshot = self.editor.update(cx, |editor, cx| { @@ -10212,6 +10334,26 @@ impl Element for EditorElement { let sticky_header_excerpt_id = sticky_header_excerpt.as_ref().map(|top| top.excerpt.id); + let buffer = snapshot.buffer_snapshot(); + let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row); + let end_buffer_row = MultiBufferRow(end_anchor.to_point(&buffer).row); + + let preliminary_scroll_pixel_position = point( + scroll_position.x * f64::from(em_layout_width), + scroll_position.y * f64::from(line_height), + ); + let indent_guides = self.layout_indent_guides( + content_origin, + text_hitbox.origin, + start_buffer_row..end_buffer_row, + preliminary_scroll_pixel_position, + line_height, + &snapshot, + window, + cx, + ); + let indent_guides_for_spacers = indent_guides.clone(); + let blocks = (!is_minimap) .then(|| { window.with_element_namespace("blocks", |window| { @@ -10232,6 +10374,7 @@ impl Element for EditorElement { 
&latest_selection_anchors, is_row_soft_wrapped, sticky_header_excerpt_id, + &indent_guides_for_spacers, window, cx, ) @@ -10239,7 +10382,8 @@ impl Element for EditorElement { }) .unwrap_or_default(); let RenderBlocksOutput { - mut blocks, + non_spacer_blocks: mut blocks, + mut spacer_blocks, row_block_types, resized_blocks, } = blocks; @@ -10290,11 +10434,6 @@ impl Element for EditorElement { None }; - let start_buffer_row = - MultiBufferRow(start_anchor.to_point(&snapshot.buffer_snapshot()).row); - let end_buffer_row = - MultiBufferRow(end_anchor.to_point(&snapshot.buffer_snapshot()).row); - let scroll_max: gpui::Point = point( ScrollPixelOffset::from( ((scroll_width - editor_width) / em_layout_width).max(0.0), @@ -10355,16 +10494,21 @@ impl Element for EditorElement { sticky_headers.as_ref().map_or(0, |h| h.lines.len()), ); }); - let indent_guides = self.layout_indent_guides( - content_origin, - text_hitbox.origin, - start_buffer_row..end_buffer_row, - scroll_pixel_position, - line_height, - &snapshot, - window, - cx, - ); + let indent_guides = + if scroll_pixel_position != preliminary_scroll_pixel_position { + self.layout_indent_guides( + content_origin, + text_hitbox.origin, + start_buffer_row..end_buffer_row, + scroll_pixel_position, + line_height, + &snapshot, + window, + cx, + ) + } else { + indent_guides + }; let crease_trailers = window.with_element_namespace("crease_trailers", |window| { @@ -10503,9 +10647,22 @@ impl Element for EditorElement { self.layout_blocks( &mut blocks, &hitbox, + &gutter_hitbox, + line_height, + scroll_position, + scroll_pixel_position, + &editor_margins, + window, + cx, + ); + self.layout_blocks( + &mut spacer_blocks, + &hitbox, + &gutter_hitbox, line_height, scroll_position, scroll_pixel_position, + &editor_margins, window, cx, ); @@ -10803,7 +10960,9 @@ impl Element for EditorElement { .and_then(|headers| headers.lines.last()) .map_or(Pixels::ZERO, |last| last.offset + line_height); - let sticky_header_height = if 
sticky_buffer_header.is_some() { + let has_sticky_buffer_header = + sticky_buffer_header.is_some() || sticky_header_excerpt_id.is_some(); + let sticky_header_height = if has_sticky_buffer_header { let full_height = FILE_HEADER_HEIGHT as f32 * line_height; let display_row = blocks .iter() @@ -10822,7 +10981,9 @@ impl Element for EditorElement { } None => full_height, }; - sticky_scroll_header_height + offset + let header_bottom_padding = + BUFFER_HEADER_PADDING.to_pixels(window.rem_size()); + sticky_scroll_header_height + offset - header_bottom_padding } else { sticky_scroll_header_height }; @@ -10900,6 +11061,7 @@ impl Element for EditorElement { inline_blame_layout, inline_code_actions, blocks, + spacer_blocks, cursors, visible_cursors, selections, @@ -10961,6 +11123,7 @@ impl Element for EditorElement { window.with_content_mask(Some(ContentMask { bounds }), |window| { self.paint_mouse_listeners(layout, window, cx); self.paint_background(layout, window, cx); + self.paint_indent_guides(layout, window, cx); if layout.gutter_hitbox.size.width > Pixels::ZERO { @@ -10970,6 +11133,12 @@ impl Element for EditorElement { self.paint_text(layout, window, cx); + if !layout.spacer_blocks.is_empty() { + window.with_element_namespace("blocks", |window| { + self.paint_spacer_blocks(layout, window, cx); + }); + } + if layout.gutter_hitbox.size.width > Pixels::ZERO { self.paint_gutter_highlights(layout, window, cx); self.paint_gutter_indicators(layout, window, cx); @@ -10977,7 +11146,7 @@ impl Element for EditorElement { if !layout.blocks.is_empty() { window.with_element_namespace("blocks", |window| { - self.paint_blocks(layout, window, cx); + self.paint_non_spacer_blocks(layout, window, cx); }); } @@ -11079,6 +11248,7 @@ pub struct EditorLayout { inline_blame_layout: Option, inline_code_actions: Option, blocks: Vec, + spacer_blocks: Vec, highlighted_ranges: Vec<(Range, Hsla)>, highlighted_gutter_ranges: Vec<(Range, Hsla)>, redacted_ranges: Vec>, @@ -11843,11 +12013,12 @@ pub fn 
layout_line( .unwrap() } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct IndentGuideLayout { origin: gpui::Point, length: Pixels, single_indent_width: Pixels, + display_row_range: Range, depth: u32, active: bool, settings: IndentGuideSettings, @@ -13353,26 +13524,26 @@ mod tests { } #[test] - fn test_checkerboard_size() { + fn test_spacer_pattern_period() { // line height is smaller than target height, so we just return half the line height - assert_eq!(EditorElement::checkerboard_size(10.0, 20.0), 5.0); + assert_eq!(EditorElement::spacer_pattern_period(10.0, 20.0), 5.0); // line height is exactly half the target height, perfect match - assert_eq!(EditorElement::checkerboard_size(20.0, 10.0), 10.0); + assert_eq!(EditorElement::spacer_pattern_period(20.0, 10.0), 10.0); // line height is close to half the target height - assert_eq!(EditorElement::checkerboard_size(20.0, 9.0), 10.0); + assert_eq!(EditorElement::spacer_pattern_period(20.0, 9.0), 10.0); // line height is close to 1/4 the target height - assert_eq!(EditorElement::checkerboard_size(20.0, 4.8), 5.0); + assert_eq!(EditorElement::spacer_pattern_period(20.0, 4.8), 5.0); } #[gpui::test(iterations = 100)] - fn test_random_checkerboard_size(mut rng: StdRng) { + fn test_random_spacer_pattern_period(mut rng: StdRng) { let line_height = rng.next_u32() as f32; let target_height = rng.next_u32() as f32; - let result = EditorElement::checkerboard_size(line_height, target_height); + let result = EditorElement::spacer_pattern_period(line_height, target_height); let k = line_height / result; assert!(k - k.round() < 0.0000001); // approximately integer diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index a2f56f625d9553e81c9de4abbe21451982cfd17e..d4877a5f1986685bea37f243edf4ac8bbdfdf9f5 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -673,7 +673,7 @@ pub(crate) async fn find_file( // (literally, [LinkTitle](link_file.txt)) as a candidate. 
fn link_pattern_file_candidates(candidate: &str) -> Vec<(String, Range)> { static MD_LINK_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\(([^)]*)\)").expect("Failed to create REGEX")); + LazyLock::new(|| Regex::new(r"]\(([^)]*)\)").expect("Failed to create REGEX")); let candidate_len = candidate.len(); @@ -1444,14 +1444,26 @@ mod tests { candidates, vec!["LinkTitle](link\\ _file.txt)", "link\\ _file.txt",] ); - // - // Square brackets not strictly necessary + // Parentheses without preceding `]` should not extract inner content, + // to avoid matching function calls like `do_work(file2)` as file paths. let candidates: Vec = link_pattern_file_candidates("(link_file.txt)") .into_iter() .map(|(c, _)| c) .collect(); + assert_eq!(candidates, vec!["(link_file.txt)"]); - assert_eq!(candidates, vec!["(link_file.txt)", "link_file.txt",]); + let candidates: Vec = link_pattern_file_candidates("do_work(file2);") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!(candidates, vec!["do_work(file2);"]); + + // Markdown links should still extract the path + let candidates: Vec = link_pattern_file_candidates("](readme.md)") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!(candidates, vec!["](readme.md)", "readme.md"]); // No nesting let candidates: Vec = diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index a240837918340f3a2540491a175d13e90de2931e..8c46e797cada703c9101fd91e670cbdd4ea713ac 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -58,10 +58,12 @@ pub enum InlayContent { impl Inlay { pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self { let mut text = hint.text(); - if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { + let needs_right_padding = hint.padding_right && !text.ends_with(" "); + let needs_left_padding = hint.padding_left && !text.starts_with(" "); + if needs_right_padding { text.push(" "); } - if hint.padding_left && text.chars_at(0).next() != 
Some(' ') { + if needs_left_padding { text.push_front(" "); } Self { diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 72951834ff4794b8862f9254af77bd9c997fb1a1..0b3f6bda09c2cf86b994682e2ed89c2614d72737 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -27,6 +27,7 @@ use util::debug_panic; use super::{Inlay, InlayId}; use crate::{ Editor, EditorSnapshot, PointForPosition, ToggleInlayHints, ToggleInlineValues, debounce_value, + display_map::{DisplayMap, InlayOffset}, hover_links::{InlayHighlight, TriggerPoint, show_link_definition}, hover_popover::{self, InlayHover}, inlays::InlaySplice, @@ -104,13 +105,34 @@ impl LspInlayHintData { self.added_hints.clear(); } + /// Like `clear`, but only wipes tracking state for the given buffer IDs. + /// Hints belonging to other buffers are left intact so they are neither + /// re-fetched nor duplicated on the next `NewLinesShown`. + pub fn clear_for_buffers( + &mut self, + buffer_ids: &HashSet, + current_hints: impl IntoIterator, + ) { + for buffer_id in buffer_ids { + self.hint_refresh_tasks.remove(buffer_id); + self.hint_chunk_fetching.remove(buffer_id); + } + for hint in current_hints { + if let Some(buffer_id) = hint.position.text_anchor.buffer_id { + if buffer_ids.contains(&buffer_id) { + self.added_hints.remove(&hint.id); + } + } + } + } + /// Checks inlay hint settings for enabled hint kinds and general enabled state. /// Generates corresponding inlay_map splice updates on settings changes. /// Does not update inlay hint cache state on disabling or inlay hint kinds change: only reenabling forces new LSP queries. fn update_settings( &mut self, new_hint_settings: InlayHintSettings, - visible_hints: Vec, + visible_hints: impl IntoIterator, ) -> ControlFlow, Option> { let old_enabled = self.enabled; // If the setting for inlay hints has changed, update `enabled`. 
This condition avoids inlay @@ -140,7 +162,7 @@ impl LspInlayHintData { ControlFlow::Continue( Some(InlaySplice { to_remove: visible_hints - .iter() + .into_iter() .filter_map(|inlay| { let inlay_kind = self.added_hints.get(&inlay.id).copied()?; if !self.allowed_hint_kinds.contains(&inlay_kind) { @@ -159,12 +181,13 @@ impl LspInlayHintData { (true, false) => { self.modifiers_override = false; self.allowed_hint_kinds = new_allowed_hint_kinds; - if visible_hints.is_empty() { + let mut visible_hints = visible_hints.into_iter().peekable(); + if visible_hints.peek().is_none() { ControlFlow::Break(None) } else { self.clear(); ControlFlow::Break(Some(InlaySplice { - to_remove: visible_hints.iter().map(|inlay| inlay.id).collect(), + to_remove: visible_hints.map(|inlay| inlay.id).collect(), to_insert: Vec::new(), })) } @@ -175,7 +198,7 @@ impl LspInlayHintData { ControlFlow::Continue( Some(InlaySplice { to_remove: visible_hints - .iter() + .into_iter() .filter_map(|inlay| { let inlay_kind = self.added_hints.get(&inlay.id).copied()?; if !self.allowed_hint_kinds.contains(&inlay_kind) { @@ -338,12 +361,20 @@ impl Editor { }; let multi_buffer = self.buffer().clone(); + let Some(inlay_hints) = self.inlay_hints.as_mut() else { return; }; if invalidate_cache.should_invalidate() { - inlay_hints.clear(); + if invalidate_hints_for_buffers.is_empty() { + inlay_hints.clear(); + } else { + inlay_hints.clear_for_buffers( + &invalidate_hints_for_buffers, + Self::visible_inlay_hints(self.display_map.read(cx)), + ); + } } inlay_hints .invalidate_hints_for_buffers @@ -420,16 +451,8 @@ impl Editor { } pub fn clear_inlay_hints(&mut self, cx: &mut Context) { - let to_remove = self - .visible_inlay_hints(cx) - .into_iter() - .map(|inlay| { - let inlay_id = inlay.id; - if let Some(inlay_hints) = &mut self.inlay_hints { - inlay_hints.added_hints.remove(&inlay_id); - } - inlay_id - }) + let to_remove = Self::visible_inlay_hints(self.display_map.read(cx)) + .map(|inlay| inlay.id) .collect::>(); 
self.splice_inlays(&to_remove, Vec::new(), cx); } @@ -439,7 +462,6 @@ impl Editor { reason: &InlayHintRefreshReason, cx: &mut Context<'_, Editor>, ) -> Option { - let visible_inlay_hints = self.visible_inlay_hints(cx); let Some(inlay_hints) = self.inlay_hints.as_mut() else { return None; }; @@ -471,6 +493,8 @@ impl Editor { } } InlayHintRefreshReason::SettingsChange(new_settings) => { + let visible_inlay_hints = + Self::visible_inlay_hints(self.display_map.read(cx)).collect::>(); match inlay_hints.update_settings(*new_settings, visible_inlay_hints) { ControlFlow::Break(Some(InlaySplice { to_remove, @@ -534,13 +558,11 @@ impl Editor { Some(invalidate_cache) } - pub(crate) fn visible_inlay_hints(&self, cx: &Context) -> Vec { - self.display_map - .read(cx) + fn visible_inlay_hints(display_map: &DisplayMap) -> impl Iterator + use<'_> { + display_map .current_inlays() .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) .cloned() - .collect() } pub fn update_inlay_link_and_hover_points( @@ -575,9 +597,8 @@ impl Editor { point_for_position.next_valid.to_point(snapshot), Bias::Right, ); - if let Some(hovered_hint) = self - .visible_inlay_hints(cx) - .into_iter() + if let Some(hovered_hint) = Self::visible_inlay_hints(self.display_map.read(cx)) + .filter(|hint| snapshot.can_resolve(&hint.position)) .skip_while(|hint| { hint.position .cmp(&previous_valid_anchor, &buffer_snapshot) @@ -602,15 +623,19 @@ impl Editor { { match cached_hint.resolve_state { ResolveState::Resolved => { - let mut extra_shift_left = 0; - let mut extra_shift_right = 0; - if cached_hint.padding_left { - extra_shift_left += 1; - extra_shift_right += 1; - } - if cached_hint.padding_right { - extra_shift_right += 1; - } + let original_text = cached_hint.text(); + let actual_left_padding = + if cached_hint.padding_left && !original_text.starts_with(" ") { + 1 + } else { + 0 + }; + let actual_right_padding = + if cached_hint.padding_right && !original_text.ends_with(" ") { + 1 + } else { + 0 + }; 
match cached_hint.label { InlayHintLabel::String(_) => { if let Some(tooltip) = cached_hint.tooltip { @@ -632,9 +657,9 @@ impl Editor { range: InlayHighlight { inlay: hovered_hint.id, inlay_position: hovered_hint.position, - range: extra_shift_left + range: actual_left_padding ..hovered_hint.text().len() - + extra_shift_right, + - actual_right_padding, }, }, window, @@ -646,17 +671,17 @@ impl Editor { InlayHintLabel::LabelParts(label_parts) => { let hint_start = snapshot.anchor_to_inlay_offset(hovered_hint.position); + let content_start = + InlayOffset(hint_start.0 + actual_left_padding); if let Some((hovered_hint_part, part_range)) = hover_popover::find_hovered_hint_part( label_parts, - hint_start, + content_start, hovered_offset, ) { - let highlight_start = - (part_range.start - hint_start) + extra_shift_left; - let highlight_end = - (part_range.end - hint_start) + extra_shift_right; + let highlight_start = part_range.start - hint_start; + let highlight_end = part_range.end - hint_start; let highlight = InlayHighlight { inlay: hovered_hint.id, inlay_position: hovered_hint.position, @@ -763,9 +788,7 @@ impl Editor { new_hints: Vec<(Range, anyhow::Result)>, cx: &mut Context, ) { - let visible_inlay_hint_ids = self - .visible_inlay_hints(cx) - .iter() + let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id)) .map(|inlay| inlay.id) .collect::>(); @@ -794,6 +817,18 @@ impl Editor { // from the cache. if invalidate_cache.should_invalidate() { hints_to_remove.extend(visible_inlay_hint_ids); + + // When invalidating, this task removes ALL visible hints for the buffer + // but only adds back hints for its own chunk ranges. 
Chunks fetched by + // other concurrent tasks (e.g., a scroll task that completed before this + // edit task) would have their hints removed but remain marked as "already + // fetched" in hint_chunk_fetching, preventing re-fetch on the next + // NewLinesShown. Fix: retain only chunks that this task has results for. + let task_chunk_ranges: HashSet<&Range> = + new_hints.iter().map(|(range, _)| range).collect(); + if let Some((_, fetched_chunks)) = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id) { + fetched_chunks.retain(|chunk| task_chunk_ranges.contains(chunk)); + } } let mut inserted_hint_text = HashMap::default(); @@ -874,8 +909,7 @@ impl Editor { std::mem::take(&mut inlay_hints.invalidate_hints_for_buffers); if !invalidate_hints_for_buffers.is_empty() { hints_to_remove.extend( - self.visible_inlay_hints(cx) - .iter() + Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| { inlay .position @@ -959,9 +993,9 @@ fn spawn_editor_hints_refresh( pub mod tests { use crate::editor_tests::update_test_language_settings; use crate::inlays::inlay_hints::InlayHintRefreshReason; + use crate::scroll::Autoscroll; use crate::scroll::ScrollAmount; use crate::{Editor, SelectionEffects}; - use crate::{ExcerptRange, scroll::Autoscroll}; use collections::HashSet; use futures::{StreamExt, future}; use gpui::{AppContext as _, Context, TestAppContext, WindowHandle}; @@ -971,7 +1005,7 @@ pub mod tests { use language::{Language, LanguageConfig, LanguageMatcher}; use languages::rust_lang; use lsp::{DEFAULT_LSP_REQUEST_TIMEOUT, FakeLanguageServer}; - use multi_buffer::{MultiBuffer, MultiBufferOffset}; + use multi_buffer::{MultiBuffer, MultiBufferOffset, PathKey}; use parking_lot::Mutex; use pretty_assertions::assert_eq; use project::{FakeFs, Project}; @@ -2321,28 +2355,32 @@ pub mod tests { .unwrap(); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + 
PathKey::sorted(0), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0)), - ExcerptRange::new(Point::new(4, 0)..Point::new(11, 0)), - ExcerptRange::new(Point::new(22, 0)..Point::new(33, 0)), - ExcerptRange::new(Point::new(44, 0)..Point::new(55, 0)), - ExcerptRange::new(Point::new(56, 0)..Point::new(66, 0)), - ExcerptRange::new(Point::new(67, 0)..Point::new(77, 0)), + Point::new(0, 0)..Point::new(2, 0), + Point::new(4, 0)..Point::new(11, 0), + Point::new(22, 0)..Point::new(33, 0), + Point::new(44, 0)..Point::new(55, 0), + Point::new(56, 0)..Point::new(66, 0), + Point::new(67, 0)..Point::new(77, 0), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 1)..Point::new(2, 1)), - ExcerptRange::new(Point::new(4, 1)..Point::new(11, 1)), - ExcerptRange::new(Point::new(22, 1)..Point::new(33, 1)), - ExcerptRange::new(Point::new(44, 1)..Point::new(55, 1)), - ExcerptRange::new(Point::new(56, 1)..Point::new(66, 1)), - ExcerptRange::new(Point::new(67, 1)..Point::new(77, 1)), + Point::new(0, 1)..Point::new(2, 1), + Point::new(4, 1)..Point::new(11, 1), + Point::new(22, 1)..Point::new(33, 1), + Point::new(44, 1)..Point::new(55, 1), + Point::new(56, 1)..Point::new(66, 1), + Point::new(67, 1)..Point::new(77, 1), ], + 0, cx, ); multibuffer @@ -2732,19 +2770,21 @@ let c = 3;"# .unwrap(); let multi_buffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), [ - // Have first excerpt to spawn over 2 chunks (50 lines each). - ExcerptRange::new(Point::new(49, 0)..Point::new(53, 0)), - // Have 2nd excerpt to be in the 2nd chunk only. 
- ExcerptRange::new(Point::new(70, 0)..Point::new(73, 0)), + Point::new(49, 0)..Point::new(53, 0), + Point::new(70, 0)..Point::new(73, 0), ], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(4, 0))], + [Point::new(0, 0)..Point::new(4, 0)], + 0, cx, ); multibuffer @@ -2930,16 +2970,23 @@ let c = 3;"# .unwrap(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { - let buffer_1_excerpts = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + [Point::new(0, 0)..Point::new(2, 0)], + 0, cx, ); - let buffer_2_excerpts = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 1)..Point::new(2, 1))], + [Point::new(0, 1)..Point::new(2, 1)], + 0, cx, ); + let excerpt_ids = multibuffer.excerpt_ids(); + let buffer_1_excerpts = vec![excerpt_ids[0]]; + let buffer_2_excerpts = vec![excerpt_ids[1]]; (buffer_1_excerpts, buffer_2_excerpts) }); @@ -3046,7 +3093,7 @@ let c = 3;"# editor .update(cx, |editor, _, cx| { editor.buffer().update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts(buffer_2_excerpts, cx) + multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); }) }) .unwrap(); @@ -4000,20 +4047,24 @@ let c = 3;"# .unwrap(); let multi_buffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_2.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(10, 0)), - ExcerptRange::new(Point::new(23, 0)..Point::new(34, 0)), + Point::new(0, 0)..Point::new(10, 0), + Point::new(23, 0)..Point::new(34, 0), ], + 0, cx, ); - 
multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_1.clone(), [ - ExcerptRange::new(Point::new(0, 0)..Point::new(10, 0)), - ExcerptRange::new(Point::new(13, 0)..Point::new(23, 0)), + Point::new(0, 0)..Point::new(10, 0), + Point::new(13, 0)..Point::new(23, 0), ], + 0, cx, ); multibuffer @@ -4137,6 +4188,613 @@ let c = 3;"# ); } + #[gpui::test] + async fn test_edit_then_scroll_race(cx: &mut gpui::TestAppContext) { + // Bug 1: An edit fires with a long debounce, and a scroll brings new lines + // before that debounce elapses. The edit task's apply_fetched_hints removes + // ALL visible hints (including the scroll-added ones) but only adds back + // hints for its own chunks. The scroll chunk remains in hint_chunk_fetching, + // so it is never re-queried, leaving it permanently empty. + init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + edit_debounce_ms: Some(700), + scroll_debounce_ms: Some(50), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + let mut file_content = String::from("fn main() {\n"); + for i in 0..150 { + file_content.push_str(&format!(" let v{i} = {i};\n")); + } + file_content.push_str("}\n"); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": file_content, + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() 
+ }, + initializer: Some(Box::new({ + let lsp_request_ranges = lsp_request_ranges.clone(); + move |fake_server| { + let lsp_request_ranges = lsp_request_ranges.clone(); + fake_server.set_request_handler::( + move |params, _| { + let lsp_request_ranges = lsp_request_ranges.clone(); + async move { + lsp_request_ranges.lock().push(params.range); + let start_line = params.range.start.line; + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(start_line + 1, 9), + label: lsp::InlayHintLabel::String(format!( + "chunk_{start_line}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let editor = + cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); + cx.executor().run_until_parked(); + let _fake_server = fake_servers.next().await.unwrap(); + + editor + .update(cx, |editor, window, cx| { + editor.set_visible_line_count(50.0, window, cx); + editor.set_visible_column_count(120.0); + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_0")), + "Should have chunk_0 hints initially, got: {visible:?}" + ); + }) + .unwrap(); + + lsp_request_ranges.lock().clear(); + + // Step 1: Make an edit → triggers BufferEdited with 700ms debounce. 
+ editor + .update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) + }); + editor.handle_input("x", window, cx); + }) + .unwrap(); + // Let the BufferEdited event propagate and the edit task get spawned. + cx.executor().run_until_parked(); + + // Step 2: Scroll down to reveal a new chunk, then trigger NewLinesShown. + // This spawns a scroll task with the shorter 50ms debounce. + editor + .update(cx, |editor, window, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx); + }) + .unwrap(); + // Explicitly trigger NewLinesShown for the new visible range. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + + // Step 3: Advance clock past scroll debounce (50ms) but NOT past edit + // debounce (700ms). The scroll task completes and adds hints for the + // new chunk. + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // The scroll task's apply_fetched_hints also processes + // invalidate_hints_for_buffers (set by the earlier BufferEdited), which + // removes the old chunk_0 hint. Only the scroll chunk's hint remains. + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_50")), + "After scroll task completes, the scroll chunk's hints should be \ + present, got: {visible:?}" + ); + }) + .unwrap(); + + // Step 4: Advance clock past the edit debounce (700ms). The edit task + // completes, calling apply_fetched_hints with should_invalidate()=true, + // which removes ALL visible hints (including the scroll chunk's) but only + // adds back hints for its own chunks (chunk_0). 
+ cx.executor().advance_clock(Duration::from_millis(700)); + cx.executor().run_until_parked(); + + // At this point the edit task has: + // - removed chunk_50's hint (via should_invalidate removing all visible) + // - added chunk_0's hint (from its own fetch) + // - (with fix) cleared chunk_50 from hint_chunk_fetching + // Without the fix, chunk_50 is stuck in hint_chunk_fetching and will + // never be re-queried by NewLinesShown. + + // Step 5: Trigger NewLinesShown to give the system a chance to re-fetch + // any chunks whose hints were lost. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_0")), + "chunk_0 hints (from edit task) should be present. Got: {visible:?}" + ); + assert!( + visible.iter().any(|h| h.starts_with("chunk_50")), + "chunk_50 hints should have been re-fetched after NewLinesShown. \ + Bug 1: the scroll chunk's hints were removed by the edit task \ + and the chunk was stuck in hint_chunk_fetching, preventing \ + re-fetch. Got: {visible:?}" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_refresh_requested_multi_server(cx: &mut gpui::TestAppContext) { + // Bug 2: When one LSP server sends workspace/inlayHint/refresh, the editor + // wipes all tracking state via clear(), then spawns tasks that call + // LspStore::inlay_hints with for_server=Some(requesting_server). The LspStore + // filters out other servers' cached hints via the for_server guard, so only + // the requesting server's hints are returned. apply_fetched_hints removes ALL + // visible hints (should_invalidate()=true) but only adds back the requesting + // server's hints. Other servers' hints disappear permanently. 
+ init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + edit_debounce_ms: Some(0), + scroll_debounce_ms: Some(0), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed", + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + // Server A returns a hint labeled "server_a". + let server_a_request_count = Arc::new(AtomicU32::new(0)); + let mut fake_servers_a = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new({ + let server_a_request_count = server_a_request_count.clone(); + move |fake_server| { + let server_a_request_count = server_a_request_count.clone(); + fake_server.set_request_handler::( + move |_params, _| { + let count = + server_a_request_count.fetch_add(1, Ordering::Release) + 1; + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 9), + label: lsp::InlayHintLabel::String(format!( + "server_a_{count}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + // Server B returns a hint labeled "server_b" at a different position. 
+ let server_b_request_count = Arc::new(AtomicU32::new(0)); + let mut fake_servers_b = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "secondary-ls", + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new({ + let server_b_request_count = server_b_request_count.clone(); + move |fake_server| { + let server_b_request_count = server_b_request_count.clone(); + fake_server.set_request_handler::( + move |_params, _| { + let count = + server_b_request_count.fetch_add(1, Ordering::Release) + 1; + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 22), + label: lsp::InlayHintLabel::String(format!( + "server_b_{count}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + let (buffer, _buffer_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let editor = + cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); + cx.executor().run_until_parked(); + + let fake_server_a = fake_servers_a.next().await.unwrap(); + let _fake_server_b = fake_servers_b.next().await.unwrap(); + + editor + .update(cx, |editor, window, cx| { + editor.set_visible_line_count(50.0, window, cx); + editor.set_visible_column_count(120.0); + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Verify both servers' hints are present initially. 
+ editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let has_a = visible.iter().any(|h| h.starts_with("server_a")); + let has_b = visible.iter().any(|h| h.starts_with("server_b")); + assert!( + has_a && has_b, + "Both servers should have hints initially. Got: {visible:?}" + ); + }) + .unwrap(); + + // Trigger RefreshRequested from server A. This should re-fetch server A's + // hints while keeping server B's hints intact. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints( + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server_a.server.server_id(), + request_id: Some(1), + }, + cx, + ); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Also trigger NewLinesShown to give the system a chance to recover + // any chunks that might have been cleared. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let has_a = visible.iter().any(|h| h.starts_with("server_a")); + let has_b = visible.iter().any(|h| h.starts_with("server_b")); + assert!( + has_a, + "Server A hints should be present after its own refresh. Got: {visible:?}" + ); + assert!( + has_b, + "Server B hints should NOT be lost when server A triggers \ + RefreshRequested. Bug 2: clear() wipes all tracking, then \ + LspStore filters out server B's cached hints via the for_server \ + guard, and apply_fetched_hints removes all visible hints but only \ + adds back server A's. 
Got: {visible:?}" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_multi_language_multibuffer_no_duplicate_hints(cx: &mut gpui::TestAppContext) { + init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + show_value_hints: Some(true), + enabled: Some(true), + edit_debounce_ms: Some(0), + scroll_debounce_ms: Some(0), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + show_background: Some(false), + toggle_on_modifiers_press: None, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed", + "index.ts": "const y = 2; // padding to keep hints from being trimmed in typescript", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + + let mut rs_fake_servers = None; + let mut ts_fake_servers = None; + for (name, path_suffix) in [("Rust", "rs"), ("TypeScript", "ts")] { + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: name.into(), + matcher: LanguageMatcher { + path_suffixes: vec![path_suffix.to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ))); + let fake_servers = language_registry.register_fake_lsp( + name, + FakeLspAdapter { + name, + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + initializer: Some(Box::new({ + move |fake_server| { + let request_count = Arc::new(AtomicU32::new(0)); + fake_server + .set_request_handler::( + move |params, _| { + let count = + request_count.fetch_add(1, Ordering::Release) + 1; + let prefix = match name { + "Rust" => "rs_hint", + "TypeScript" => "ts_hint", + other => panic!("Unexpected language: {other}"), + }; + 
async move { + Ok(Some(vec![lsp::InlayHint { + position: params.range.start, + label: lsp::InlayHintLabel::String(format!( + "{prefix}_{count}" + )), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..Default::default() + }, + ); + match name { + "Rust" => rs_fake_servers = Some(fake_servers), + "TypeScript" => ts_fake_servers = Some(fake_servers), + _ => unreachable!(), + } + } + + let (rs_buffer, _rs_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let (ts_buffer, _ts_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/index.ts"), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + rs_buffer.clone(), + [Point::new(0, 0)..Point::new(1, 0)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + ts_buffer.clone(), + [Point::new(0, 0)..Point::new(1, 0)], + 0, + cx, + ); + multibuffer + }); + + cx.executor().run_until_parked(); + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + + let _rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap(); + let _ts_fake_server = ts_fake_servers.unwrap().next().await.unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Verify initial state: both languages have exactly one hint each + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let rs_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("rs_hint")) + .collect(); + let ts_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("ts_hint")) + .collect(); + assert_eq!( + rs_hints.len(), + 1, + 
"Should have exactly 1 Rust hint initially, got: {rs_hints:?}" + ); + assert_eq!( + ts_hints.len(), + 1, + "Should have exactly 1 TypeScript hint initially, got: {ts_hints:?}" + ); + }) + .unwrap(); + + // Edit the Rust buffer — triggers BufferEdited(rust_buffer_id). + // The language filter in refresh_inlay_hints excludes TypeScript excerpts + // from processing, but the global clear() wipes added_hints for ALL buffers. + editor + .update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) + }); + editor.handle_input("x", window, cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + + // Trigger NewLinesShown — this causes TypeScript chunks to be re-fetched + // because hint_chunk_fetching was wiped by clear(). The cached hints pass + // the added_hints.insert(...).is_none() filter (also wiped) and get inserted + // alongside the still-displayed copies, causing duplicates. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + + // Assert: TypeScript hints must NOT be duplicated + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let ts_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("ts_hint")) + .collect(); + assert_eq!( + ts_hints.len(), + 1, + "TypeScript hints should NOT be duplicated after editing Rust buffer \ + and triggering NewLinesShown. Got: {ts_hints:?}" + ); + + let rs_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("rs_hint")) + .collect(); + assert_eq!( + rs_hints.len(), + 1, + "Rust hints should still be present after editing. 
Got: {rs_hints:?}" + ); + }) + .unwrap(); + } + pub(crate) fn init_test(cx: &mut TestAppContext, f: &dyn Fn(&mut AllLanguageSettingsContent)) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); @@ -4246,9 +4904,7 @@ let c = 3;"# } pub fn visible_hint_labels(editor: &Editor, cx: &Context) -> Vec { - editor - .visible_inlay_hints(cx) - .into_iter() + Editor::visible_inlay_hints(editor.display_map.read(cx)) .map(|hint| hint.text().to_string()) .collect() } diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 685387342caf8e705a3648cb07acaa1867db55d8..1a79414ddc3aa57397d964d4e0af0d87bedc9c3b 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -735,10 +735,13 @@ impl Item for Editor { h_flex() .gap_2() .child( - Label::new(self.title(cx).to_string()) - .color(label_color) - .when(params.preview, |this| this.italic()) - .when(was_deleted, |this| this.strikethrough()), + Label::new(util::truncate_and_trailoff( + &self.title(cx), + MAX_TAB_TITLE_LEN, + )) + .color(label_color) + .when(params.preview, |this| this.italic()) + .when(was_deleted, |this| this.strikethrough()), ) .when_some(description, |this, description| { this.child( diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index 20843518a069d74ab3d7351091ecc27cb6755811..a7c0c5eed2aed44d69bcaa3657894bad4d9deeb1 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -619,7 +619,7 @@ mod jsx_tag_autoclose_tests { use super::*; use gpui::{AppContext as _, TestAppContext}; use languages::language; - use multi_buffer::{ExcerptRange, MultiBufferOffset}; + use multi_buffer::{MultiBufferOffset, PathKey}; use text::Selection; async fn test_setup(cx: &mut TestAppContext) -> EditorTestContext { @@ -816,21 +816,12 @@ mod jsx_tag_autoclose_tests { let buffer_c = cx.new(|cx| language::Buffer::local(") { @@ -73,18 +75,37 @@ impl Editor { ) { let display_snapshot = 
self.display_snapshot(cx); let scroll_margin_rows = self.vertical_scroll_margin() as u32; - let new_screen_top = self - .selections - .newest_display(&display_snapshot) - .head() - .row() - .0; + let selection_head = self.selections.newest_display(&display_snapshot).head(); + + let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled + && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton() + { + let select_head_point = + rope::Point::new(selection_head.to_point(&display_snapshot).row, 0); + buffer_snapshot + .outline_items_containing(select_head_point..select_head_point, false, None) + .iter() + .filter(|outline| { + outline.range.start.offset + < select_head_point.to_offset(&buffer_snapshot) as u32 + }) + .collect::>() + .len() + } else { + 0 + } as u32; + + let new_screen_top = selection_head.row().0; let header_offset = display_snapshot .buffer_snapshot() .show_headers() .then(|| display_snapshot.buffer_header_height()) .unwrap_or(0); - let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows + header_offset); + + // If the number of sticky headers exceeds the vertical_scroll_margin, + // we need to adjust the scroll top a bit further + let adjustment = scroll_margin_rows.max(sticky_headers_len) + header_offset; + let new_screen_top = new_screen_top.saturating_sub(adjustment); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); } diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index d2bbe533124efdb252dc80c5677165521ef76bab..31a573f04787e3759a6a21ec15f36ec148a80f30 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -122,7 +122,10 @@ impl Editor { if !self.mode().is_full() || !self.semantic_token_state.enabled() { self.invalidate_semantic_tokens(None); self.display_map.update(cx, |display_map, _| { - display_map.semantic_token_highlights.clear(); + match Arc::get_mut(&mut 
display_map.semantic_token_highlights) { + Some(highlights) => highlights.clear(), + None => display_map.semantic_token_highlights = Arc::new(Default::default()), + }; }); self.semantic_token_state.update_task = Task::ready(()); cx.notify(); @@ -171,8 +174,8 @@ impl Editor { .display_map .read(cx) .semantic_token_highlights - .iter() - .map(|(buffer_id, _)| *buffer_id) + .keys() + .copied() .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id)) .filter(|buffer_id| { !self @@ -214,8 +217,9 @@ impl Editor { }) { None } else { - let task = sema.semantic_tokens(buffer, for_server, cx); - Some(async move { (buffer_id, query_version, task.await) }) + sema.semantic_tokens(buffer, for_server, cx).map( + |task| async move { (buffer_id, query_version, task.await) }, + ) } }) .collect::>() @@ -308,7 +312,7 @@ impl Editor { token_highlights.sort_by(|a, b| { a.range.start.cmp(&b.range.start, &multi_buffer_snapshot) }); - display_map.semantic_token_highlights.insert( + Arc::make_mut(&mut display_map.semantic_token_highlights).insert( buffer_id, (Arc::from(token_highlights), Arc::new(interner)), ); @@ -464,7 +468,7 @@ mod tests { use language::{Language, LanguageConfig, LanguageMatcher}; use languages::FakeLspAdapter; use multi_buffer::{ - AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, + AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey, }; use project::Project; use rope::Point; @@ -1160,14 +1164,18 @@ mod tests { }); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), toml_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(0, 4)], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), rust_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + 
[Point::new(0, 0)..Point::new(0, 4)], + 0, cx, ); multibuffer @@ -1234,202 +1242,6 @@ mod tests { ); } - #[gpui::test] - async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - update_test_language_settings(cx, &|language_settings| { - language_settings.languages.0.insert( - "TOML".into(), - LanguageSettingsContent { - semantic_tokens: Some(SemanticTokens::Full), - ..LanguageSettingsContent::default() - }, - ); - }); - - let toml_language = Arc::new(Language::new( - LanguageConfig { - name: "TOML".into(), - matcher: LanguageMatcher { - path_suffixes: vec!["toml".into()], - ..LanguageMatcher::default() - }, - ..LanguageConfig::default() - }, - None, - )); - - let toml_legend = lsp::SemanticTokensLegend { - token_types: vec!["property".into()], - token_modifiers: Vec::new(), - }; - - let app_state = cx.update(workspace::AppState::test); - - cx.update(|cx| { - assets::Assets.load_test_fonts(cx); - crate::init(cx); - workspace::init(app_state.clone(), cx); - }); - - let project = Project::test(app_state.fs.clone(), [], cx).await; - let language_registry = project.read_with(cx, |project, _| project.languages().clone()); - let full_counter_toml = Arc::new(AtomicUsize::new(0)); - let full_counter_toml_clone = full_counter_toml.clone(); - - let mut toml_server = language_registry.register_fake_lsp( - toml_language.name(), - FakeLspAdapter { - name: "toml", - capabilities: lsp::ServerCapabilities { - semantic_tokens_provider: Some( - lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( - lsp::SemanticTokensOptions { - legend: toml_legend, - full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), - ..lsp::SemanticTokensOptions::default() - }, - ), - ), - ..lsp::ServerCapabilities::default() - }, - initializer: Some(Box::new({ - let full_counter_toml_clone = full_counter_toml_clone.clone(); - move |fake_server| { - let full_counter = full_counter_toml_clone.clone(); - fake_server - .set_request_handler::( - 
move |_, _| { - full_counter.fetch_add(1, atomic::Ordering::Release); - async move { - Ok(Some(lsp::SemanticTokensResult::Tokens( - lsp::SemanticTokens { - // highlight 'a' as a property - data: vec![ - 0, // delta_line - 0, // delta_start - 1, // length - 0, // token_type - 0, // token_modifiers_bitset - ], - result_id: Some("a".into()), - }, - ))) - } - }, - ); - } - })), - ..FakeLspAdapter::default() - }, - ); - language_registry.add(toml_language.clone()); - - app_state - .fs - .as_fake() - .insert_tree( - EditorLspTestContext::root_path(), - json!({ - ".git": {}, - "dir": { - "foo.toml": "a = 1\nb = 2\n", - } - }), - ) - .await; - - let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - project - .update(cx, |project, cx| { - project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx) - }) - .await - .unwrap(); - cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx)) - .await; - - let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone()); - let toml_item = workspace - .update_in(cx, |workspace, window, cx| { - workspace.open_path(toml_file, None, true, window, cx) - }) - .await - .expect("Could not open test file"); - - let toml_editor = cx.update(|_, cx| { - toml_item - .act_as::(cx) - .expect("Opened test file wasn't an editor") - }); - let toml_buffer = cx.read(|cx| { - toml_editor - .read(cx) - .buffer() - .read(cx) - .as_singleton() - .unwrap() - }); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( - toml_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], - cx, - ); - multibuffer.push_excerpts( - toml_buffer.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], - cx, - ); - multibuffer - }); - - let editor = workspace.update_in(cx, |_, window, cx| { - 
cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx)) - }); - editor.update_in(cx, |editor, window, cx| { - let nav_history = workspace - .read(cx) - .active_pane() - .read(cx) - .nav_history_for_item(&cx.entity()); - editor.set_nav_history(Some(nav_history)); - window.focus(&editor.focus_handle(cx), cx) - }); - - let _toml_server = toml_server.next().await.unwrap(); - - // Initial request. - cx.executor().advance_clock(Duration::from_millis(200)); - let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task()); - cx.run_until_parked(); - task.await; - assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1); - - // Edit two parts of the multibuffer, which both map to the same buffer. - // - // Without debouncing, this grabs semantic tokens 4 times (twice for the - // toml editor, and twice for the multibuffer). - editor.update_in(cx, |editor, _, cx| { - editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx); - editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx); - }); - cx.executor().advance_clock(Duration::from_millis(200)); - let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task()); - cx.run_until_parked(); - task.await; - assert_eq!( - extract_semantic_highlights(&editor, &cx), - vec![MultiBufferOffset(0)..MultiBufferOffset(1)] - ); - - assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2); - } - fn extract_semantic_highlights( editor: &Entity, cx: &TestAppContext, diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index c85b7545c2c8bbabb3777476fa4b318f0b70908f..cff98f474487b52e55ab3f53bff250de24cf2d80 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -34,7 +34,7 @@ use workspace::{ }; use crate::{ - Autoscroll, DisplayMap, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap, + Autoscroll, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap, actions::{DisableBreakpoint, 
EditLogBreakpoint, EnableBreakpoint, ToggleBreakpoint}, display_map::Companion, }; @@ -667,52 +667,28 @@ impl SplittableEditor { .collect() }; - let mut companion = Companion::new( - rhs_display_map_id, - convert_rhs_rows_to_lhs, - convert_lhs_rows_to_rhs, - ); - - // stream this - for (path, diff) in path_diffs { - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - LhsEditor::update_path_excerpts_from_rhs( - path.clone(), - rhs_multibuffer, - lhs_multibuffer, - diff.clone(), - lhs_cx, - ) - }); - - if let Some((lhs_excerpt_ids, rhs_merge_groups)) = sync_result { - let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len()); - for group in rhs_merge_groups { - if group.len() == 1 { - final_rhs_ids.push(group[0]); - } else { - let merged_id = rhs_multibuffer.merge_excerpts(&group, cx); - final_rhs_ids.push(merged_id); - } - } + let companion = cx.new(|_| { + Companion::new( + rhs_display_map_id, + convert_rhs_rows_to_lhs, + convert_lhs_rows_to_rhs, + ) + }); - for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) { - companion.add_excerpt_mapping(*lhs_id, *rhs_id); - } - let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); - let rhs_buffer_id = diff.read(cx).buffer_id; - companion.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); - } - }); - } + self.lhs = Some(lhs); - let companion = cx.new(|_| companion); + let paths_for_sync: Vec<_> = path_diffs + .into_iter() + .map(|(path, diff)| (path, vec![], diff)) + .collect(); + self.sync_lhs_for_paths(paths_for_sync, &companion, cx); rhs_display_map.update(cx, |dm, cx| { dm.set_companion(Some((lhs_display_map, companion.clone())), cx); }); + let lhs = self.lhs.as_ref().unwrap(); + let shared_scroll_anchor = self .rhs_editor .read(cx) @@ -761,8 +737,6 @@ impl SplittableEditor { cx.notify(); }); - self.lhs = Some(lhs); - cx.notify(); } @@ -1011,34 +985,52 @@ impl SplittableEditor { diff: Entity, cx: &mut 
Context, ) -> (Vec>, bool) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs = self.lhs.as_ref(); - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - mutate_excerpts_for_paths( - rhs_multibuffer, - lhs, - &rhs_display_map, - vec![(path.clone(), diff.clone())], + let Some(companion) = self.companion(cx) else { + return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + path, + buffer.clone(), + ranges, + context_line_count, + cx, + ); + if !anchors.is_empty() + && rhs_multibuffer + .diff_for(buffer.read(cx).remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + rhs_multibuffer.add_diff(diff, cx); + } + (anchors, added_a_new_excerpt) + }); + }; + + let old_rhs_ids: Vec = self + .rhs_multibuffer + .read(cx) + .excerpts_for_path(&path) + .collect(); + + let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + ranges, + context_line_count, cx, - |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( - path.clone(), - buffer.clone(), - ranges, - context_line_count, - cx, - ); - if !anchors.is_empty() - && rhs_multibuffer - .diff_for(buffer.read(cx).remote_id()) - .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) - { - rhs_multibuffer.add_diff(diff.clone(), cx); - } - (anchors, added_a_new_excerpt) - }, - ) - }) + ); + if !anchors.is_empty() + && rhs_multibuffer + .diff_for(buffer.read(cx).remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + rhs_multibuffer.add_diff(diff.clone(), cx); + } + (anchors, added_a_new_excerpt) + }); + + self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx); + result } fn expand_excerpts( @@ -1048,78 +1040,209 @@ impl SplittableEditor { direction: 
ExpandExcerptDirection, cx: &mut Context, ) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs = self.lhs.as_ref(); - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - if lhs.is_some() { - let snapshot = rhs_multibuffer.snapshot(cx); - let paths_with_diffs: Vec<_> = excerpt_ids - .clone() - .filter_map(|excerpt_id| { - let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; - let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; - Some((path, diff)) - }) - .collect::>() - .into_iter() - .collect(); - - mutate_excerpts_for_paths( - rhs_multibuffer, - lhs, - &rhs_display_map, - paths_with_diffs, - cx, - |rhs_multibuffer, cx| { - rhs_multibuffer.expand_excerpts(excerpt_ids.clone(), lines, direction, cx); - }, - ); - } else { + let Some(companion) = self.companion(cx) else { + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); - } + }); + return; + }; + + let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let snapshot = rhs_multibuffer.snapshot(cx); + let paths = excerpt_ids + .clone() + .filter_map(|excerpt_id| { + let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; + let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; + let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; + Some((path, diff)) + }) + .collect::>() + .into_iter() + .map(|(path, diff)| { + let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect(); + (path, old_ids, diff) + }) + .collect(); + rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + paths }); + + self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx); } pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - - if let Some(lhs) = &self.lhs { + let Some(lhs) = &self.lhs else { 
self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path).collect(); - let lhs_excerpt_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + rhs_multibuffer.remove_excerpts_for_path(path, cx); + }); + return; + }; - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); - }); - } + let rhs_excerpt_ids: Vec = self + .rhs_multibuffer + .read(cx) + .excerpts_for_path(&path) + .collect(); + let lhs_excerpt_ids: Vec = + lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); - }); - lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { - lhs_multibuffer.remove_excerpts_for_path(path, cx); - }); - } else { - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { + companion.update(cx, |c, _| { + c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); }); } + + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); + }); + lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { + lhs_multibuffer.remove_excerpts_for_path(path, cx); + }); } fn search_token(&self) -> SearchToken { SearchToken::new(self.focused_side() as u64) } - fn editor_for_token(&self, token: SearchToken) -> &Entity { + fn editor_for_token(&self, token: SearchToken) -> Option<&Entity> { if token.value() == SplitSide::Left as u64 { - if let Some(lhs) = &self.lhs { - return &lhs.editor; - } + return self.lhs.as_ref().map(|lhs| &lhs.editor); } - &self.rhs_editor + Some(&self.rhs_editor) + } + + fn companion(&self, cx: &App) -> Option> { + if self.lhs.is_none() { + return None; + 
} + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + rhs_display_map.read(cx).companion().cloned() + } + + fn sync_lhs_for_paths( + &self, + paths_with_old_rhs_ids: Vec<(PathKey, Vec, Entity)>, + companion: &Entity, + cx: &mut Context, + ) { + let Some(lhs) = &self.lhs else { return }; + + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids { + let old_lhs_ids: Vec = + lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + + companion.update(cx, |c, _| { + c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids); + }); + + let rhs_excerpt_ids: Vec = + rhs_multibuffer.excerpts_for_path(&path).collect(); + let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else { + lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx); + }); + continue; + }; + let Some(main_buffer_snapshot) = rhs_multibuffer + .snapshot(cx) + .buffer_for_excerpt(excerpt_id) + .cloned() + else { + continue; + }; + let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id()) + else { + continue; + }; + + let base_text_buffer = diff.read(cx).base_text_buffer().clone(); + let diff_snapshot = diff.read(cx).snapshot(cx); + let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot(); + + let lhs_ranges: Vec> = rhs_multibuffer + .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx) + .into_iter() + .filter(|(id, _)| rhs_excerpt_ids.contains(id)) + .map(|(_, excerpt_range)| { + let to_base_text = |range: Range| { + let start = diff_snapshot + .buffer_point_to_base_text_range( + Point::new(range.start.row, 0), + &main_buffer_snapshot, + ) + .start; + let end = diff_snapshot + .buffer_point_to_base_text_range( + Point::new(range.end.row, 0), + &main_buffer_snapshot, + ) + .end; + let end_column = diff_snapshot.base_text().line_len(end.row); + Point::new(start.row, 0)..Point::new(end.row, end_column) + }; + let primary = 
excerpt_range.primary.to_point(&main_buffer_snapshot); + let context = excerpt_range.context.to_point(&main_buffer_snapshot); + ExcerptRange { + primary: to_base_text(primary), + context: to_base_text(context), + } + }) + .collect(); + + let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + let lhs_result = lhs_multibuffer.update_path_excerpts( + path, + base_text_buffer, + &base_text_buffer_snapshot, + lhs_ranges, + lhs_cx, + ); + if !lhs_result.excerpt_ids.is_empty() + && lhs_multibuffer + .diff_for(base_text_buffer_snapshot.remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx); + } + + let mut groups = Vec::new(); + for (lhs_id, chunk) in &lhs_result + .excerpt_ids + .iter() + .copied() + .zip(rhs_excerpt_ids) + .chunk_by(|(lhs_id, _)| *lhs_id) + { + groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::>())); + } + groups + }); + + let pairs = groups + .into_iter() + .map(|(lhs_id, rhs_group)| { + let rhs_id = if rhs_group.len() == 1 { + rhs_group[0] + } else { + rhs_multibuffer.merge_excerpts(&rhs_group, cx) + }; + (lhs_id, rhs_id) + }) + .collect::>(); + + let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); + let rhs_buffer_id = diff.read(cx).buffer_id; + companion.update(cx, |c, _| { + for (lhs_id, rhs_id) in pairs { + c.add_excerpt_mapping(lhs_id, rhs_id); + } + c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); + }); + } + }); } } @@ -1758,7 +1881,10 @@ impl SearchableItem for SplittableEditor { window: &mut Window, cx: &mut Context, ) { - self.editor_for_token(token).update(cx, |editor, cx| { + let Some(target) = self.editor_for_token(token) else { + return; + }; + target.update(cx, |editor, cx| { editor.update_matches(matches, active_match_index, token, window, cx); }); } @@ -1804,7 +1930,10 @@ impl SearchableItem for SplittableEditor { window: &mut Window, cx: &mut Context, ) { - self.editor_for_token(token).update(cx, 
|editor, cx| { + let Some(target) = self.editor_for_token(token) else { + return; + }; + target.update(cx, |editor, cx| { editor.activate_match(index, matches, token, window, cx); }); } @@ -1816,7 +1945,10 @@ impl SearchableItem for SplittableEditor { window: &mut Window, cx: &mut Context, ) { - self.editor_for_token(token).update(cx, |editor, cx| { + let Some(target) = self.editor_for_token(token) else { + return; + }; + target.update(cx, |editor, cx| { editor.select_matches(matches, token, window, cx); }); } @@ -1829,7 +1961,10 @@ impl SearchableItem for SplittableEditor { window: &mut Window, cx: &mut Context, ) { - self.editor_for_token(token).update(cx, |editor, cx| { + let Some(target) = self.editor_for_token(token) else { + return; + }; + target.update(cx, |editor, cx| { editor.replace(identifier, query, token, window, cx); }); } @@ -1873,7 +2008,7 @@ impl SearchableItem for SplittableEditor { window: &mut Window, cx: &mut Context, ) -> Option { - self.editor_for_token(token).update(cx, |editor, cx| { + self.editor_for_token(token)?.update(cx, |editor, cx| { editor.active_match_index(direction, matches, token, window, cx) }) } @@ -1927,209 +2062,6 @@ impl Render for SplittableEditor { } } -fn mutate_excerpts_for_paths( - rhs_multibuffer: &mut MultiBuffer, - lhs: Option<&LhsEditor>, - rhs_display_map: &Entity, - paths_with_diffs: Vec<(PathKey, Entity)>, - cx: &mut Context, - mutate: impl FnOnce(&mut MultiBuffer, &mut Context) -> R, -) -> R { - let old_rhs_ids: Vec<_> = paths_with_diffs - .iter() - .map(|(path, _)| { - rhs_multibuffer - .excerpts_for_path(path) - .collect::>() - }) - .collect(); - - let result = mutate(rhs_multibuffer, cx); - - if let Some(lhs) = lhs { - let mut sync_results = Vec::new(); - let mut diffs_for_mapping = Vec::new(); - - for ((path, diff), old_rhs_ids) in paths_with_diffs.into_iter().zip(old_rhs_ids) { - let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - LhsEditor::sync_path_excerpts( - path, - old_rhs_ids, 
- rhs_multibuffer, - lhs_multibuffer, - diff.clone(), - rhs_display_map, - lhs_cx, - ) - }); - if let Some(sync_result) = sync_result { - sync_results.push(sync_result); - diffs_for_mapping.push(diff); - } - } - - for ((lhs_excerpt_ids, rhs_merge_groups), diff) in - sync_results.into_iter().zip(diffs_for_mapping.into_iter()) - { - let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len()); - for group in rhs_merge_groups { - if group.len() == 1 { - final_rhs_ids.push(group[0]); - } else { - let merged_id = rhs_multibuffer.merge_excerpts(&group, cx); - final_rhs_ids.push(merged_id); - } - } - - debug_assert_eq!(final_rhs_ids.len(), lhs_excerpt_ids.len()); - - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); - let rhs_buffer_id = diff.read(cx).buffer_id; - companion.update(cx, |c, _| { - for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) { - c.add_excerpt_mapping(*lhs_id, *rhs_id); - } - c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); - }); - } - } - } - - result -} - -impl LhsEditor { - fn update_path_excerpts_from_rhs( - path_key: PathKey, - rhs_multibuffer: &MultiBuffer, - lhs_multibuffer: &mut MultiBuffer, - diff: Entity, - lhs_cx: &mut Context, - ) -> Option<(Vec, Vec>)> { - let Some(excerpt_id) = rhs_multibuffer.excerpts_for_path(&path_key).next() else { - lhs_multibuffer.remove_excerpts_for_path(path_key, lhs_cx); - return None; - }; - - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path_key).collect(); - - let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(lhs_cx); - let main_buffer = rhs_multibuffer_snapshot - .buffer_for_excerpt(excerpt_id) - .unwrap(); - let diff_snapshot; - let base_text_buffer_snapshot; - let remote_id; - { - let diff = diff.read(lhs_cx); - let base_text_buffer = diff.base_text_buffer().read(lhs_cx); - diff_snapshot = diff.snapshot(lhs_cx); - base_text_buffer_snapshot = 
base_text_buffer.snapshot(); - remote_id = base_text_buffer.remote_id(); - } - let new = rhs_multibuffer - .excerpts_for_buffer(main_buffer.remote_id(), lhs_cx) - .into_iter() - .filter(|(id, _)| rhs_excerpt_ids.contains(&id)) - .map(|(_, excerpt_range)| { - let point_range_to_base_text_point_range = |range: Range| { - let start = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.start.row, 0), - main_buffer, - ) - .start; - let end = diff_snapshot - .buffer_point_to_base_text_range(Point::new(range.end.row, 0), main_buffer) - .end; - let end_column = diff_snapshot.base_text().line_len(end.row); - Point::new(start.row, 0)..Point::new(end.row, end_column) - }; - let rhs = excerpt_range.primary.to_point(main_buffer); - let context = excerpt_range.context.to_point(main_buffer); - ExcerptRange { - primary: point_range_to_base_text_point_range(rhs), - context: point_range_to_base_text_point_range(context), - } - }) - .collect(); - - let lhs_result = lhs_multibuffer.update_path_excerpts( - path_key, - diff.read(lhs_cx).base_text_buffer().clone(), - &base_text_buffer_snapshot, - new, - lhs_cx, - ); - if !lhs_result.excerpt_ids.is_empty() - && lhs_multibuffer - .diff_for(remote_id) - .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) - { - let main_buffer_entity = rhs_multibuffer - .buffer(main_buffer.remote_id()) - .expect("main buffer should exist in rhs_multibuffer"); - lhs_multibuffer.add_inverted_diff(diff, main_buffer_entity, lhs_cx); - } - - let rhs_merge_groups: Vec> = { - let mut groups = Vec::new(); - let mut current_group = Vec::new(); - let mut last_id = None; - - for (lhs_id, rhs_id) in lhs_result.excerpt_ids.iter().zip(rhs_excerpt_ids) { - if last_id == Some(lhs_id) { - current_group.push(rhs_id); - } else { - if !current_group.is_empty() { - groups.push(current_group); - } - current_group = vec![rhs_id]; - last_id = Some(lhs_id); - } - } - if !current_group.is_empty() { - groups.push(current_group); - } - groups - }; - - let 
deduplicated_lhs_ids: Vec = - lhs_result.excerpt_ids.iter().dedup().copied().collect(); - - Some((deduplicated_lhs_ids, rhs_merge_groups)) - } - - fn sync_path_excerpts( - path_key: PathKey, - old_rhs_excerpt_ids: Vec, - rhs_multibuffer: &MultiBuffer, - lhs_multibuffer: &mut MultiBuffer, - diff: Entity, - rhs_display_map: &Entity, - lhs_cx: &mut Context, - ) -> Option<(Vec, Vec>)> { - let old_lhs_excerpt_ids: Vec = - lhs_multibuffer.excerpts_for_path(&path_key).collect(); - - if let Some(companion) = rhs_display_map.read(lhs_cx).companion().cloned() { - companion.update(lhs_cx, |c, _| { - c.remove_excerpt_mappings(old_lhs_excerpt_ids, old_rhs_excerpt_ids); - }); - } - - Self::update_path_excerpts_from_rhs( - path_key, - rhs_multibuffer, - lhs_multibuffer, - diff, - lhs_cx, - ) - } -} - #[cfg(test)] mod tests { use std::sync::Arc; diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 8052cf215e7ec879dba939a2f66699827bb58aeb..bef2b3fc3ec2b949ffb8288d59b1201f6f3dde90 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -123,8 +123,6 @@ pub fn assert_text_with_selections( assert_eq!(actual, marked_text, "Selections don't match"); } -// RA thinks this is dead code even though it is used in a whole lot of tests -#[allow(dead_code)] #[cfg(any(test, feature = "test-support"))] pub(crate) fn build_editor( buffer: Entity, diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 398f8ef158de4cf0333233eba823e5df68f0cc08..101c1559a7a0fb6e5d0d5bba7281a0cb78ab4b65 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -13,7 +13,7 @@ use gpui::{ }; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow}; +use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey}; use parking_lot::RwLock; use 
project::{FakeFs, Project}; use std::{ @@ -128,10 +128,26 @@ impl EditorTestContext { ) -> EditorTestContext { let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); let buffer = cx.new(|cx| { - for excerpt in excerpts.into_iter() { + for (index, excerpt) in excerpts.into_iter().enumerate() { let (text, ranges) = marked_text_ranges(excerpt, false); let buffer = cx.new(|cx| Buffer::local(text, cx)); - multibuffer.push_excerpts(buffer, ranges.into_iter().map(ExcerptRange::new), cx); + let point_ranges: Vec<_> = { + let snapshot = buffer.read(cx); + ranges + .into_iter() + .map(|range| { + snapshot.offset_to_point(range.start) + ..snapshot.offset_to_point(range.end) + }) + .collect() + }; + multibuffer.set_excerpts_for_path( + PathKey::sorted(index as u64), + buffer, + point_ranges, + 0, + cx, + ); } multibuffer }); diff --git a/crates/etw_tracing/Cargo.toml b/crates/etw_tracing/Cargo.toml index 7f287307bc90e4462257fbeae8d5716dc5056ee7..c46e3b820a950f30f991f7de3dd27510db8825f8 100644 --- a/crates/etw_tracing/Cargo.toml +++ b/crates/etw_tracing/Cargo.toml @@ -21,10 +21,4 @@ workspace.workspace = true [target.'cfg(target_os = "windows")'.dependencies] wprcontrol = { git = "https://github.com/zed-industries/wprcontrol", rev = "cd811f7" } windows-core = "0.61" -windows = { workspace = true, features = [ - "Win32_Foundation", - "Win32_System_Com", - "Win32_System_Ole", - "Win32_System_Variant", - "Win32_UI_Shell", -] } +windows.workspace = true diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index cbc5cd1568cf80ad23b9da9dfcaab74730986533..54e6ab0b925191c16885b8b8ed89369039c467f6 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -564,6 +564,7 @@ impl ExampleInstance { stop: Vec::new(), thinking_allowed: true, thinking_effort: None, + speed: None, }; let model = model.clone(); @@ -682,9 +683,7 @@ impl agent::ThreadEnvironment for EvalThreadEnvironment { fn create_subagent( &self, - _parent_thread: Entity, 
_label: String, - _initial_prompt: String, _cx: &mut App, ) -> Result> { unimplemented!() diff --git a/crates/explorer_command_injector/src/explorer_command_injector.rs b/crates/explorer_command_injector/src/explorer_command_injector.rs index bfa2a0326c9975037ed860acfdee7cd32e3075d8..1bd85339a9fd8958c496eccf2bedcb1610c56557 100644 --- a/crates/explorer_command_injector/src/explorer_command_injector.rs +++ b/crates/explorer_command_injector/src/explorer_command_injector.rs @@ -106,18 +106,17 @@ impl IClassFactory_Impl for ExplorerCommandInjectorFactory_Impl { riid: *const windows_core::GUID, ppvobject: *mut *mut core::ffi::c_void, ) -> Result<()> { + if ppvobject.is_null() || riid.is_null() { + return Err(windows::Win32::Foundation::E_POINTER.into()); + } + unsafe { *ppvobject = std::ptr::null_mut(); } + if punkouter.is_none() { let factory: IExplorerCommand = ExplorerCommandInjector {}.into(); - let ret = unsafe { factory.query(riid, ppvobject).ok() }; - if ret.is_ok() { - unsafe { - *ppvobject = factory.into_raw(); - } - } - ret + unsafe { factory.query(riid, ppvobject).ok() } } else { Err(E_INVALIDARG.into()) } @@ -145,19 +144,17 @@ extern "system" fn DllGetClassObject( iid: *const GUID, out: *mut *mut std::ffi::c_void, ) -> HRESULT { + if out.is_null() || class_id.is_null() || iid.is_null() { + return E_INVALIDARG; + } + unsafe { *out = std::ptr::null_mut(); } let class_id = unsafe { *class_id }; if class_id == MODULE_ID { let instance: IClassFactory = ExplorerCommandInjectorFactory {}.into(); - let ret = unsafe { instance.query(iid, out) }; - if ret.is_ok() { - unsafe { - *out = instance.into_raw(); - } - } - ret + unsafe { instance.query(iid, out) } } else { CLASS_E_CLASSNOTAVAILABLE } diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 6699a9dca099177cfd550ba0f68ef62828356d15..c691296d61183c9bb0fcd41ff6c74eed6cb61149 100644 --- a/crates/extension_host/src/extension_host.rs +++ 
b/crates/extension_host/src/extension_host.rs @@ -32,8 +32,8 @@ use futures::{ select_biased, }; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, WeakEntity, - actions, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _, + WeakEntity, actions, }; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use language::{ @@ -46,7 +46,7 @@ use release_channel::ReleaseChannel; use remote::RemoteClient; use semver::Version; use serde::{Deserialize, Serialize}; -use settings::Settings; +use settings::{SemanticTokenRules, Settings, SettingsStore}; use std::ops::RangeInclusive; use std::str::FromStr; use std::{ @@ -1220,6 +1220,15 @@ impl ExtensionStore { self.proxy .remove_languages(&languages_to_remove, &grammars_to_remove); + // Remove semantic token rules for languages being unloaded. + if !languages_to_remove.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for language in &languages_to_remove { + store.remove_language_semantic_token_rules(language.as_ref(), cx); + } + }); + } + let mut grammars_to_add = Vec::new(); let mut themes_to_add = Vec::new(); let mut icon_themes_to_add = Vec::new(); @@ -1267,12 +1276,30 @@ impl ExtensionStore { .iter() .filter(|(_, entry)| extensions_to_load.contains(&entry.extension)) .collect::>(); + let mut semantic_token_rules_to_add: Vec<(LanguageName, SemanticTokenRules)> = Vec::new(); for (language_name, language) in languages_to_add { let mut language_path = self.installed_dir.clone(); language_path.extend([ Path::new(language.extension.as_ref()), language.path.as_path(), ]); + + // Load semantic token rules if present in the language directory. 
+ let rules_path = language_path.join("semantic_token_rules.json"); + if let Ok(rules_json) = std::fs::read_to_string(&rules_path) { + match serde_json_lenient::from_str::(&rules_json) { + Ok(rules) => { + semantic_token_rules_to_add.push((language_name.clone(), rules)); + } + Err(err) => { + log::error!( + "Failed to parse semantic token rules from {}: {err:#}", + rules_path.display() + ); + } + } + } + self.proxy.register_language( language_name.clone(), language.grammar.clone(), @@ -1302,6 +1329,15 @@ impl ExtensionStore { ); } + // Register semantic token rules for newly loaded extension languages. + if !semantic_token_rules_to_add.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for (language_name, rules) in semantic_token_rules_to_add { + store.set_language_semantic_token_rules(language_name.0.clone(), rules, cx); + } + }); + } + let fs = self.fs.clone(); let wasm_host = self.wasm_host.clone(); let root_dir = self.installed_dir.clone(); diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 8f96de0e7b6d9b385fcda533a31ecc34b5afdbcc..8cbacfd823400f2988738af03a05dfbfc0ed72d4 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -37,16 +37,6 @@ impl FeatureFlag for AgentSharingFeatureFlag { const NAME: &'static str = "agent-sharing"; } -pub struct SubagentsFeatureFlag; - -impl FeatureFlag for SubagentsFeatureFlag { - const NAME: &'static str = "subagents"; - - fn enabled_for_staff() -> bool { - true - } -} - pub struct DiffReviewFeatureFlag; impl FeatureFlag for DiffReviewFeatureFlag { @@ -57,12 +47,18 @@ impl FeatureFlag for DiffReviewFeatureFlag { } } +pub struct GitGraphFeatureFlag; + +impl FeatureFlag for GitGraphFeatureFlag { + const NAME: &'static str = "git-graph"; +} + pub struct StreamingEditFileToolFeatureFlag; impl FeatureFlag for StreamingEditFileToolFeatureFlag { const NAME: &'static str = "streaming-edit-file-tool"; fn enabled_for_staff() -> bool { - false + true } 
} diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 6355524e4f328df0ca7fcf24c1df0557676ba6a6..04cae2dd2ad18f85a7c2ed663c1c3482febb22d3 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -58,4 +58,4 @@ gpui = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } [features] -test-support = ["gpui/test-support", "git/test-support"] +test-support = ["gpui/test-support", "git/test-support", "util/test-support"] diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 6513d5a33b6eb96f7a69c5f96530f1d44a71c3ec..85489b6057cd8214ee512fb477428c93cdb32219 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -20,7 +20,7 @@ use ignore::gitignore::GitignoreBuilder; use parking_lot::Mutex; use rope::Rope; use smol::{channel::Sender, future::FutureExt as _}; -use std::{path::PathBuf, sync::Arc}; +use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool}; use text::LineEnding; use util::{paths::PathStyle, rel_path::RelPath}; @@ -32,6 +32,7 @@ pub struct FakeGitRepository { pub(crate) dot_git_path: PathBuf, pub(crate) repository_dir_path: PathBuf, pub(crate) common_dir_path: PathBuf, + pub(crate) is_trusted: Arc, } #[derive(Debug, Clone)] @@ -406,7 +407,31 @@ impl GitRepository for FakeGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - self.with_state_async(false, |state| Ok(state.worktrees.clone())) + let dot_git_path = self.dot_git_path.clone(); + self.with_state_async(false, move |state| { + let work_dir = dot_git_path + .parent() + .map(PathBuf::from) + .unwrap_or(dot_git_path); + let head_sha = state + .refs + .get("HEAD") + .cloned() + .unwrap_or_else(|| "0000000".to_string()); + let branch_ref = state + .current_branch_name + .as_ref() + .map(|name| format!("refs/heads/{name}")) + .unwrap_or_else(|| "refs/heads/main".to_string()); + let main_worktree = Worktree { + path: work_dir, + ref_name: branch_ref.into(), + sha: 
head_sha.into(), + }; + let mut all = vec![main_worktree]; + all.extend(state.worktrees.iter().cloned()); + Ok(all) + }) } fn create_worktree( @@ -768,6 +793,109 @@ impl GitRepository for FakeGitRepository { unimplemented!() } + fn diff_stat( + &self, + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { + fn count_lines(s: &str) -> u32 { + if s.is_empty() { + 0 + } else { + s.lines().count() as u32 + } + } + + fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool { + if prefixes.is_empty() { + return true; + } + prefixes.iter().any(|prefix| { + let prefix_str = prefix.as_unix_str(); + if prefix_str == "." { + return true; + } + path == prefix || path.starts_with(&prefix) + }) + } + + let path_prefixes = path_prefixes.to_vec(); + + let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); + let worktree_files: HashMap = self + .fs + .files() + .iter() + .filter_map(|path| { + let repo_path = path.strip_prefix(&workdir_path).ok()?; + if repo_path.starts_with(".git") { + return None; + } + let content = self + .fs + .read_file_sync(path) + .ok() + .and_then(|bytes| String::from_utf8(bytes).ok())?; + let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; + Some((RepoPath::from_rel_path(&repo_path), content)) + }) + .collect(); + + self.with_state_async(false, move |state| { + let mut entries = Vec::new(); + let all_paths: HashSet<&RepoPath> = state + .head_contents + .keys() + .chain( + worktree_files + .keys() + .filter(|p| state.index_contents.contains_key(*p)), + ) + .collect(); + for path in all_paths { + if !matches_prefixes(path, &path_prefixes) { + continue; + } + let head = state.head_contents.get(path); + let worktree = worktree_files.get(path); + match (head, worktree) { + (Some(old), Some(new)) if old != new => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: count_lines(old), + }, + )); + } + (Some(old), None) => { + entries.push(( + path.clone(), + 
git::status::DiffStat { + added: 0, + deleted: count_lines(old), + }, + )); + } + (None, Some(new)) => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: 0, + }, + )); + } + _ => {} + } + } + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + Ok(git::status::GitDiffStat { + entries: entries.into(), + }) + }) + .boxed() + } + fn checkpoint(&self) -> BoxFuture<'static, Result> { let executor = self.executor.clone(); let fs = self.fs.clone(); @@ -881,146 +1009,13 @@ impl GitRepository for FakeGitRepository { fn commit_data_reader(&self) -> Result { anyhow::bail!("commit_data_reader not supported for FakeGitRepository") } -} -#[cfg(test)] -mod tests { - use super::*; - use crate::{FakeFs, Fs}; - use gpui::TestAppContext; - use serde_json::json; - use std::path::Path; - - #[gpui::test] - async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { - let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; - - for worktree_dir_setting in worktree_dir_settings { - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) - .await; - let repo = fs - .open_repo(Path::new("/project/.git"), None) - .expect("should open fake repo"); - - // Initially no worktrees - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - - let expected_dir = git::repository::resolve_worktree_directory( - Path::new("/project"), - worktree_dir_setting, - ); - - // Create a worktree - repo.create_worktree( - "feature-branch".to_string(), - expected_dir.clone(), - Some("abc123".to_string()), - ) - .await - .unwrap(); - - // List worktrees — should have one - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!( - worktrees[0].path, - expected_dir.join("feature-branch"), - "failed for worktree_directory setting: {worktree_dir_setting:?}" - ); - assert_eq!(worktrees[0].ref_name.as_ref(), 
"refs/heads/feature-branch"); - assert_eq!(worktrees[0].sha.as_ref(), "abc123"); - - // Directory should exist in FakeFs after create - assert!( - fs.is_dir(&expected_dir.join("feature-branch")).await, - "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // Create a second worktree (without explicit commit) - repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // Rename the first worktree - repo.rename_worktree( - expected_dir.join("feature-branch"), - expected_dir.join("renamed-branch"), - ) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - worktrees - .iter() - .any(|w| w.path == expected_dir.join("renamed-branch")), - "renamed worktree should exist at new path for setting {worktree_dir_setting:?}" - ); - assert!( - worktrees - .iter() - .all(|w| w.path != expected_dir.join("feature-branch")), - "old path should no longer exist for setting {worktree_dir_setting:?}" - ); - - // Directory should be moved in FakeFs after rename - assert!( - !fs.is_dir(&expected_dir.join("feature-branch")).await, - "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}" - ); - assert!( - fs.is_dir(&expected_dir.join("renamed-branch")).await, - "new worktree directory should exist after rename for setting {worktree_dir_setting:?}" - ); - - // Rename a nonexistent worktree should fail - let result = repo - .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) - .await; - assert!(result.is_err()); - - // Remove a worktree - repo.remove_worktree(expected_dir.join("renamed-branch"), false) - .await - .unwrap(); - - 
let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!(worktrees[0].path, expected_dir.join("bugfix-branch")); - - // Directory should be removed from FakeFs after remove - assert!( - !fs.is_dir(&expected_dir.join("renamed-branch")).await, - "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - - // Remove a nonexistent worktree should fail - let result = repo - .remove_worktree(PathBuf::from("/nonexistent"), false) - .await; - assert!(result.is_err()); - - // Remove the last worktree - repo.remove_worktree(expected_dir.join("bugfix-branch"), false) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - assert!( - !fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - } + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 2db9e48a2e77bdb3e49fce0b16ea9b67ffaacbc0..0fde444171042eda859edcac7915c456ab91e265 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -2776,6 +2776,7 @@ impl Fs for FakeFs { repository_dir_path: repository_dir_path.to_owned(), common_dir_path: common_dir_path.to_owned(), checkpoints: Arc::default(), + is_trusted: Arc::default(), }) as _ }, ) diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs index 36dfcaf168b4f0190c5c49bf4798fac7bc9bd37b..bae7f2fc94dd5161793f85f64cc0a1448a187134 100644 --- a/crates/fs/tests/integration/fake_git_repo.rs +++ b/crates/fs/tests/integration/fake_git_repo.rs @@ -1,9 +1,146 @@ use fs::{FakeFs, Fs}; -use gpui::BackgroundExecutor; +use gpui::{BackgroundExecutor, TestAppContext}; use serde_json::json; 
-use std::path::Path; +use std::path::{Path, PathBuf}; use util::path; +#[gpui::test] +async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { + let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; + + for worktree_dir_setting in worktree_dir_settings { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) + .await; + let repo = fs + .open_repo(Path::new("/project/.git"), None) + .expect("should open fake repo"); + + // Initially only the main worktree exists + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + + let expected_dir = git::repository::resolve_worktree_directory( + Path::new("/project"), + worktree_dir_setting, + ); + + // Create a worktree + repo.create_worktree( + "feature-branch".to_string(), + expected_dir.clone(), + Some("abc123".to_string()), + ) + .await + .unwrap(); + + // List worktrees — should have main + one created + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!( + worktrees[1].path, + expected_dir.join("feature-branch"), + "failed for worktree_directory setting: {worktree_dir_setting:?}" + ); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + // Directory should exist in FakeFs after create + assert!( + fs.is_dir(&expected_dir.join("feature-branch")).await, + "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" + ); + + // Create a second worktree (without explicit commit) + repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!( + fs.is_dir(&expected_dir.join("bugfix-branch")).await, + 
"second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" + ); + + // Rename the first worktree + repo.rename_worktree( + expected_dir.join("feature-branch"), + expected_dir.join("renamed-branch"), + ) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!( + worktrees + .iter() + .any(|w| w.path == expected_dir.join("renamed-branch")), + "renamed worktree should exist at new path for setting {worktree_dir_setting:?}" + ); + assert!( + worktrees + .iter() + .all(|w| w.path != expected_dir.join("feature-branch")), + "old path should no longer exist for setting {worktree_dir_setting:?}" + ); + + // Directory should be moved in FakeFs after rename + assert!( + !fs.is_dir(&expected_dir.join("feature-branch")).await, + "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}" + ); + assert!( + fs.is_dir(&expected_dir.join("renamed-branch")).await, + "new worktree directory should exist after rename for setting {worktree_dir_setting:?}" + ); + + // Rename a nonexistent worktree should fail + let result = repo + .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) + .await; + assert!(result.is_err()); + + // Remove a worktree + repo.remove_worktree(expected_dir.join("renamed-branch"), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!(worktrees[1].path, expected_dir.join("bugfix-branch")); + + // Directory should be removed from FakeFs after remove + assert!( + !fs.is_dir(&expected_dir.join("renamed-branch")).await, + "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" + ); + + // Remove a nonexistent worktree should fail + let result = repo + .remove_worktree(PathBuf::from("/nonexistent"), false) + .await; + assert!(result.is_err()); + + // 
Remove the last worktree + repo.remove_worktree(expected_dir.join("bugfix-branch"), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert!( + !fs.is_dir(&expected_dir.join("bugfix-branch")).await, + "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" + ); + } +} + #[gpui::test] async fn test_checkpoints(executor: BackgroundExecutor) { let fs = FakeFs::new(executor); diff --git a/crates/git/clippy.toml b/crates/git/clippy.toml new file mode 100644 index 0000000000000000000000000000000000000000..fb3926840493fd5981c1861e7cea96bd54b9647f --- /dev/null +++ b/crates/git/clippy.toml @@ -0,0 +1,28 @@ +allow-private-module-inception = true +avoid-breaking-exported-api = false +ignore-interior-mutability = [ + # Suppresses clippy::mutable_key_type, which is a false positive as the Eq + # and Hash impls do not use fields with interior mutability. 
+ "agent_ui::context::AgentContextKey" +] +disallowed-methods = [ + { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, + { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, + { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, + { path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" }, + { path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" }, + { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" }, + { path = "smol::Timer::after", reason = "smol::Timer introduces non-determinism in tests", replacement = "gpui::BackgroundExecutor::timer" }, + { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." }, + { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." }, + { path = "cocoa::foundation::NSString::alloc", reason = "NSString must be autoreleased to avoid memory leaks. Use `ns_string()` helper instead." 
}, + { path = "smol::process::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::new_command", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, +] +disallowed-types = [ + # { path = "std::collections::HashMap", replacement = "collections::HashMap" }, + # { path = "std::collections::HashSet", replacement = "collections::HashSet" }, + # { path = "indexmap::IndexSet", replacement = "collections::IndexSet" }, + # { path = "indexmap::IndexMap", replacement = "collections::IndexMap" }, +] \ No newline at end of file diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 9dc184bf2ac253c8bc24f6203f13d6654ac2b64b..c44aea74051bb7c190a091703d6c60807fc4e27e 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -1,11 +1,11 @@ use crate::Oid; use crate::commit::get_messages; -use crate::repository::RepoPath; +use crate::repository::{GitBinary, RepoPath}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use futures::AsyncWriteExt; use serde::{Deserialize, Serialize}; -use std::{ops::Range, path::Path}; +use std::ops::Range; use text::{LineEnding, Rope}; use time::OffsetDateTime; use time::UtcOffset; @@ -21,15 +21,13 @@ pub struct Blame { } impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, + pub(crate) async fn for_path( + git: &GitBinary, path: &RepoPath, content: &Rope, line_ending: LineEnding, ) -> Result { - let output = - run_git_blame(git_binary, 
working_directory, path, content, line_ending).await?; + let output = run_git_blame(git, path, content, line_ending).await?; let mut entries = parse_git_blame(&output)?; entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); @@ -40,7 +38,7 @@ impl Blame { } let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) + let messages = get_messages(git, &shas) .await .context("failed to get commit messages")?; @@ -52,8 +50,7 @@ const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; async fn run_git_blame( - git_binary: &Path, - working_directory: &Path, + git: &GitBinary, path: &RepoPath, contents: &Rope, line_ending: LineEnding, @@ -61,12 +58,7 @@ async fn run_git_blame( let mut child = { let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str()); let _enter = span.enter(); - util::command::new_command(git_binary) - .current_dir(working_directory) - .arg("blame") - .arg("--incremental") - .arg("--contents") - .arg("-") + git.build_command(["blame", "--incremental", "--contents", "-"]) .arg(path.as_unix_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) diff --git a/crates/git/src/commit.rs b/crates/git/src/commit.rs index 3f3526afc4ba8fa146592684a6d3acfc44ce7e73..46e050ce155fc049a670fdfa26101eb729b34352 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -1,11 +1,11 @@ use crate::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, parse_git_remote_url, - status::StatusCode, + repository::GitBinary, status::StatusCode, }; use anyhow::{Context as _, Result}; use collections::HashMap; use gpui::SharedString; -use std::{path::Path, sync::Arc}; +use std::sync::Arc; #[derive(Clone, Debug, Default)] pub struct ParsedCommitMessage { @@ -48,7 +48,7 @@ impl ParsedCommitMessage { } } -pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result> { +pub(crate) async 
fn get_messages(git: &GitBinary, shas: &[Oid]) -> Result> { if shas.is_empty() { return Ok(HashMap::default()); } @@ -63,12 +63,12 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result Result>()) } -async fn get_messages_impl(working_directory: &Path, shas: &[Oid]) -> Result> { +async fn get_messages_impl(git: &GitBinary, shas: &[Oid]) -> Result> { const MARKER: &str = ""; - let output = util::command::new_command("git") - .current_dir(working_directory) - .arg("show") + let output = git + .build_command(["show"]) .arg("-s") .arg(format!("--format=%B{}", MARKER)) .args(shas.iter().map(ToString::to_string)) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ab445a1cd830a726491fab1fc6209686e80960b1..45e719fb6d5a586074de523b5974ee11bf225453 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -21,6 +21,7 @@ use text::LineEnding; use std::collections::HashSet; use std::ffi::{OsStr, OsString}; +use std::sync::atomic::AtomicBool; use std::process::ExitStatus; use std::str::FromStr; @@ -55,6 +56,26 @@ pub const GRAPH_CHUNK_SIZE: usize = 1000; /// Default value for the `git.worktree_directory` setting. pub const DEFAULT_WORKTREE_DIRECTORY: &str = "../worktrees"; +/// Given the git common directory (from `commondir()`), derive the original +/// repository's working directory. +/// +/// For a standard checkout, `common_dir` is `/.git`, so the parent +/// is the working directory. For a git worktree, `common_dir` is the **main** +/// repo's `.git` directory, so the parent is the original repo's working directory. +/// +/// Falls back to returning `common_dir` itself if it doesn't end with `.git` +/// (e.g. bare repos or unusual layouts). 
+pub fn original_repo_path_from_common_dir(common_dir: &Path) -> PathBuf { + if common_dir.file_name() == Some(OsStr::new(".git")) { + common_dir + .parent() + .map(|p| p.to_path_buf()) + .unwrap_or_else(|| common_dir.to_path_buf()) + } else { + common_dir.to_path_buf() + } +} + /// Resolves the configured worktree directory to an absolute path. /// /// `worktree_directory_setting` is the raw string from the user setting @@ -283,6 +304,7 @@ impl Branch { pub struct Worktree { pub path: PathBuf, pub ref_name: SharedString, + // todo(git_worktree) This type should be a Oid pub sha: SharedString, } @@ -320,6 +342,8 @@ pub fn parse_worktrees_from_str>(raw_worktrees: T) -> Vec BoxFuture<'_, Result>; + fn diff_stat( + &self, + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result>; + /// Creates a checkpoint for the repository. fn checkpoint(&self) -> BoxFuture<'static, Result>; @@ -933,6 +962,9 @@ pub trait GitRepository: Send + Sync { ) -> BoxFuture<'_, Result<()>>; fn commit_data_reader(&self) -> Result; + + fn set_trusted(&self, trusted: bool); + fn is_trusted(&self) -> bool; } pub enum DiffType { @@ -959,6 +991,7 @@ pub struct RealGitRepository { pub any_git_binary_path: PathBuf, any_git_binary_help_output: Arc>>, executor: BackgroundExecutor, + is_trusted: Arc, } impl RealGitRepository { @@ -977,6 +1010,7 @@ impl RealGitRepository { any_git_binary_path, executor, any_git_binary_help_output: Arc::new(Mutex::new(None)), + is_trusted: Arc::new(AtomicBool::new(false)), }) } @@ -988,20 +1022,24 @@ impl RealGitRepository { .map(Path::to_path_buf) } + fn git_binary(&self) -> Result { + Ok(GitBinary::new( + self.any_git_binary_path.clone(), + self.working_directory() + .with_context(|| "Can't run git commands without a working directory")?, + self.executor.clone(), + self.is_trusted(), + )) + } + async fn any_git_binary_help_output(&self) -> SharedString { if let Some(output) = self.any_git_binary_help_output.lock().clone() { return output; } - let git_binary_path = 
self.any_git_binary_path.clone(); - let executor = self.executor.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let output: SharedString = self .executor - .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(["help", "-a"]) - .await - }) + .spawn(async move { git_binary?.run(["help", "-a"]).await }) .await .unwrap_or_default() .into(); @@ -1044,6 +1082,7 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter { git_binary_path.unwrap_or(PathBuf::from("git")), paths::home_dir().clone(), cx.background_executor().clone(), + true, ); cx.background_spawn(async move { @@ -1075,14 +1114,12 @@ impl GitRepository for RealGitRepository { } fn show(&self, commit: String) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(git_binary_path) - .current_dir(&working_directory) - .args([ + let git = git_binary?; + let output = git + .build_command([ "--no-optional-locks", "show", "--no-patch", @@ -1113,15 +1150,14 @@ impl GitRepository for RealGitRepository { } fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result> { - let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned) - else { + if self.repository.lock().workdir().is_none() { return future::ready(Err(anyhow!("no working directory"))).boxed(); - }; - let git_binary_path = self.any_git_binary_path.clone(); + } + let git_binary = self.git_binary(); cx.background_spawn(async move { - let show_output = util::command::new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ + let git = git_binary?; + let show_output = git + .build_command([ "--no-optional-locks", "show", "--format=", @@ -1142,9 +1178,8 @@ impl GitRepository for 
RealGitRepository { let changes = parse_git_diff_name_status(&show_stdout); let parent_sha = format!("{}^", commit); - let mut cat_file_process = util::command::new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) + let mut cat_file_process = git + .build_command(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -1251,18 +1286,17 @@ impl GitRepository for RealGitRepository { mode: ResetMode, env: Arc>, ) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); async move { - let working_directory = self.working_directory(); - let mode_flag = match mode { ResetMode::Mixed => "--mixed", ResetMode::Soft => "--soft", }; - let output = new_command(&self.any_git_binary_path) + let git = git_binary?; + let output = git + .build_command(["reset", mode_flag, &commit]) .envs(env.iter()) - .current_dir(&working_directory?) - .args(["reset", mode_flag, &commit]) .output() .await?; anyhow::ensure!( @@ -1281,17 +1315,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); async move { if paths.is_empty() { return Ok(()); } - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) 
+ let git = git_binary?; + let output = git + .build_command(["checkout", &commit, "--"]) .envs(env.iter()) - .args(["checkout", &commit, "--"]) .args(paths.iter().map(|path| path.as_unix_str())) .output() .await?; @@ -1313,33 +1346,29 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { - // This check is required because index.get_path() unwraps internally :( let mut index = repo.index()?; index.read(false)?; const STAGE_NORMAL: i32 = 0; - let path = path.as_std_path(); - // `RepoPath` contains a `RelPath` which normalizes `.` into an empty path - // `get_path` unwraps on empty paths though, so undo that normalization here - let path = if path.components().next().is_none() { - ".".as_ref() - } else { - path - }; - let oid = match index.get_path(path, STAGE_NORMAL) { - Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, - _ => return Ok(None), + // git2 unwraps internally on empty paths or `.` + if path.is_empty() { + bail!("empty path has no index text"); + } + let Some(entry) = index.get_path(path.as_std_path(), STAGE_NORMAL) else { + return Ok(None); }; + if entry.mode == GIT_MODE_SYMLINK { + return Ok(None); + } - let content = repo.find_blob(oid)?.content().to_owned(); + let content = repo.find_blob(entry.id)?.content().to_owned(); Ok(String::from_utf8(content).ok()) } - match logic(&repo.lock(), &path) { - Ok(value) => return value, - Err(err) => log::error!("Error loading index text: {:?}", err), - } - None + logic(&repo.lock(), &path) + .context("loading index text") + .log_err() + .flatten() }) .boxed() } @@ -1348,14 +1377,26 @@ impl GitRepository for RealGitRepository { let repo = self.repository.clone(); self.executor .spawn(async move { - let repo = repo.lock(); - let head = repo.head().ok()?.peel_to_tree().log_err()?; - let entry = head.get_path(path.as_std_path()).ok()?; - if entry.filemode() == i32::from(git2::FileMode::Link) { - return None; + fn 
logic(repo: &git2::Repository, path: &RepoPath) -> Result> { + let head = repo.head()?.peel_to_tree()?; + // git2 unwraps internally on empty paths or `.` + if path.is_empty() { + return Err(anyhow!("empty path has no committed text")); + } + let Some(entry) = head.get_path(path.as_std_path()).ok() else { + return Ok(None); + }; + if entry.filemode() == i32::from(git2::FileMode::Link) { + return Ok(None); + } + let content = repo.find_blob(entry.id())?.content().to_owned(); + Ok(String::from_utf8(content).ok()) } - let content = repo.find_blob(entry.id()).log_err()?.content().to_owned(); - String::from_utf8(content).ok() + + logic(&repo.lock(), &path) + .context("loading committed text") + .log_err() + .flatten() }) .boxed() } @@ -1378,18 +1419,16 @@ impl GitRepository for RealGitRepository { env: Arc>, is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let mode = if is_executable { "100755" } else { "100644" }; if let Some(content) = content { - let mut child = new_command(&git_binary_path) - .current_dir(&working_directory) + let mut child = git + .build_command(["hash-object", "-w", "--stdin"]) .envs(env.iter()) - .args(["hash-object", "-w", "--stdin"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; @@ -1402,10 +1441,9 @@ impl GitRepository for RealGitRepository { log::debug!("indexing SHA: {sha}, path {path:?}"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(["update-index", "--add", "--cacheinfo", mode, sha]) .envs(env.iter()) - .args(["update-index", "--add", "--cacheinfo", mode, sha]) .arg(path.as_unix_str()) .output() .await?; @@ -1417,10 +1455,9 @@ impl GitRepository for RealGitRepository { ); } else { 
log::debug!("removing path {path:?} from the index"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(["update-index", "--force-remove"]) .envs(env.iter()) - .args(["update-index", "--force-remove"]) .arg(path.as_unix_str()) .output() .await?; @@ -1449,14 +1486,12 @@ impl GitRepository for RealGitRepository { } fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut process = new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ + let git = git_binary?; + let mut process = git + .build_command([ "--no-optional-locks", "cat-file", "--batch-check=%(objectname)", @@ -1509,19 +1544,14 @@ impl GitRepository for RealGitRepository { } fn status(&self, path_prefixes: &[RepoPath]) -> Task> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)), }; let args = git_status_args(path_prefixes); log::debug!("Checking for git status in {path_prefixes:?}"); self.executor.spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1533,9 +1563,8 @@ impl GitRepository for RealGitRepository { } fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + 
let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)).boxed(), }; @@ -1560,11 +1589,7 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1577,13 +1602,12 @@ impl GitRepository for RealGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory?) - .args(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) + let git = git_binary?; + let output = git + .build_command(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) .output() .await?; if output.status.success() { @@ -1598,8 +1622,7 @@ impl GitRepository for RealGitRepository { } fn branches(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { let fields = [ @@ -1621,12 +1644,8 @@ impl GitRepository for RealGitRepository { "--format", &fields, ]; - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(args).output().await?; anyhow::ensure!( output.status.success(), @@ -1640,11 +1659,7 @@ impl GitRepository for RealGitRepository { if branches.is_empty() { let args = vec!["symbolic-ref", "--quiet", "HEAD"]; - let output = new_command(&git_binary_path) - 
.current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; // git symbolic-ref returns a non-0 exit code if HEAD points // to something other than a branch @@ -1666,13 +1681,12 @@ impl GitRepository for RealGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory?) - .args(&["--no-optional-locks", "worktree", "list", "--porcelain"]) + let git = git_binary?; + let output = git + .build_command(&["--no-optional-locks", "worktree", "list", "--porcelain"]) .output() .await?; if output.status.success() { @@ -1692,8 +1706,7 @@ impl GitRepository for RealGitRepository { directory: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let final_path = directory.join(&name); let mut args = vec![ OsString::from("--no-optional-locks"), @@ -1713,11 +1726,8 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { std::fs::create_dir_all(final_path.parent().unwrap_or(&final_path))?; - let output = new_command(&git_binary_path) - .current_dir(working_directory?) 
- .args(args) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(args).output().await?; if output.status.success() { Ok(()) } else { @@ -1729,9 +1739,7 @@ impl GitRepository for RealGitRepository { } fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1745,18 +1753,14 @@ impl GitRepository for RealGitRepository { } args.push("--".into()); args.push(path.as_os_str().into()); - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(args).await?; anyhow::Ok(()) }) .boxed() } fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1768,9 +1772,7 @@ impl GitRepository for RealGitRepository { old_path.as_os_str().into(), new_path.as_os_str().into(), ]; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(args).await?; anyhow::Ok(()) }) .boxed() @@ -1778,9 +1780,7 @@ impl GitRepository for RealGitRepository { fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { let repo = self.repository.clone(); - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); let branch = self.executor.spawn(async move { let repo = repo.lock(); let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) { @@ -1815,9 +1815,7 @@ impl GitRepository for RealGitRepository { self.executor 
.spawn(async move { let branch = branch.await?; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["checkout", &branch]) - .await?; + git_binary?.run(&["checkout", &branch]).await?; anyhow::Ok(()) }) .boxed() @@ -1828,9 +1826,7 @@ impl GitRepository for RealGitRepository { name: String, base_branch: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1841,22 +1837,18 @@ impl GitRepository for RealGitRepository { args.push(&base_branch_str); } - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&args) - .await?; + git_binary?.run(&args).await?; anyhow::Ok(()) }) .boxed() } fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) + git_binary? 
.run(&["branch", "-m", &branch, &new_name]) .await?; anyhow::Ok(()) @@ -1865,15 +1857,11 @@ impl GitRepository for RealGitRepository { } fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["branch", "-d", &name]) - .await?; + git_binary?.run(&["branch", "-d", &name]).await?; anyhow::Ok(()) }) .boxed() @@ -1885,20 +1873,11 @@ impl GitRepository for RealGitRepository { content: Rope, line_ending: LineEnding, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git = self.git_binary(); - executor + self.executor .spawn(async move { - crate::blame::Blame::for_path( - &git_binary_path, - &working_directory?, - &path, - &content, - line_ending, - ) - .await + crate::blame::Blame::for_path(&git?, &path, &content, line_ending).await }) .boxed() } @@ -1913,11 +1892,10 @@ impl GitRepository for RealGitRepository { skip: usize, limit: Option, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; // Use a unique delimiter with a hardcoded UUID to separate commits // This essentially eliminates any chance of encountering the delimiter in actual commit data let commit_delimiter = @@ -1945,9 +1923,8 @@ impl GitRepository for RealGitRepository { args.push("--"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(&args) + let output = git + .build_command(&args) .arg(path.as_unix_str()) 
.output() .await?; @@ -1992,30 +1969,17 @@ impl GitRepository for RealGitRepository { } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let output = match diff { DiffType::HeadToIndex => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--staged"]) - .output() - .await? - } - DiffType::HeadToWorktree => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff"]) - .output() - .await? + git.build_command(["diff", "--staged"]).output().await? } + DiffType::HeadToWorktree => git.build_command(["diff"]).output().await?, DiffType::MergeBase { base_ref } => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--merge-base", base_ref.as_ref()]) + git.build_command(["diff", "--merge-base", base_ref.as_ref()]) .output() .await? 
} @@ -2031,20 +1995,49 @@ impl GitRepository for RealGitRepository { .boxed() } + fn diff_stat( + &self, + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { + let path_prefixes = path_prefixes.to_vec(); + let git_binary = self.git_binary(); + + self.executor + .spawn(async move { + let git_binary = git_binary?; + let mut args: Vec = vec![ + "diff".into(), + "--numstat".into(), + "--no-renames".into(), + "HEAD".into(), + ]; + if !path_prefixes.is_empty() { + args.push("--".into()); + args.extend( + path_prefixes + .iter() + .map(|p| p.as_std_path().to_string_lossy().into_owned()), + ); + } + let output = git_binary.run(&args).await?; + Ok(crate::status::parse_numstat(&output)) + }) + .boxed() + } + fn stage_paths( &self, paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(["update-index", "--add", "--remove", "--"]) .envs(env.iter()) - .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_unix_str())) .output() .await?; @@ -2064,16 +2057,15 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) 
+ let git = git_binary?; + let output = git + .build_command(["reset", "--quiet", "--"]) .envs(env.iter()) - .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_std_path())) .output() .await?; @@ -2094,19 +2086,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(&git_binary_path); - cmd.current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(["stash", "push", "--quiet", "--include-untracked"]) .envs(env.iter()) - .args(["stash", "push", "--quiet"]) - .arg("--include-untracked"); - - cmd.args(paths.iter().map(|p| p.as_unix_str())); - - let output = cmd.output().await?; + .args(paths.iter().map(|p| p.as_unix_str())) + .output() + .await?; anyhow::ensure!( output.status.success(), @@ -2123,20 +2112,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "pop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2153,20 +2137,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "apply".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2183,20 +2162,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "drop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2216,16 +2190,14 @@ impl GitRepository for RealGitRepository { ask_pass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); let executor = self.executor.clone(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { - let mut cmd = new_command(git_binary_path); - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(["commit", "--quiet", "-m"]) + let git = git_binary?; + let mut cmd = git.build_command(["commit", "--quiet", "-m"]); + cmd.envs(env.iter()) .arg(&message.to_string()) .arg("--cleanup=strip") .arg("--no-verify") @@ -2264,16 +2236,21 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. 
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't push")?; let working_directory = working_directory?; - let mut command = new_command(git_binary_path); + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["push"]); command .envs(env.iter()) - .current_dir(&working_directory) - .args(["push"]) .args(options.map(|option| match option { PushOptions::SetUpstream => "--set-upstream", PushOptions::Force => "--force-with-lease", @@ -2301,15 +2278,20 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't pull")?; - let mut command = new_command(git_binary_path); - command - .envs(env.iter()) - .current_dir(&working_directory?) - .arg("pull"); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["pull"]); + command.envs(env.iter()); if rebase { command.arg("--rebase"); @@ -2337,15 +2319,21 @@ impl GitRepository for RealGitRepository { let remote_name = format!("{}", fetch_options); let git_binary_path = self.system_git_binary_path.clone(); let executor = cx.background_executor().clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. 
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?; - let mut command = new_command(git_binary_path); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["fetch", &remote_name]); command .envs(env.iter()) - .current_dir(&working_directory?) - .args(["fetch", &remote_name]) .stdout(Stdio::piped()) .stderr(Stdio::piped()); @@ -2355,14 +2343,12 @@ impl GitRepository for RealGitRepository { } fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["rev-parse", "--abbrev-ref"]) + let git = git_binary?; + let output = git + .build_command(["rev-parse", "--abbrev-ref"]) .arg(format!("{branch}@{{push}}")) .output() .await?; @@ -2382,14 +2368,12 @@ impl GitRepository for RealGitRepository { } fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["config", "--get"]) + let git = git_binary?; + let output = git + .build_command(["config", "--get"]) .arg(format!("branch.{branch}.remote")) .output() .await?; @@ -2406,16 +2390,11 @@ impl GitRepository for RealGitRepository { } fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + 
let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["remote", "-v"]) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(["remote", "-v"]).output().await?; anyhow::ensure!( output.status.success(), @@ -2464,17 +2443,12 @@ impl GitRepository for RealGitRepository { } fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let git_cmd = async |args: &[&str]| -> Result { - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; anyhow::ensure!( output.status.success(), String::from_utf8_lossy(&output.stderr).to_string() @@ -2523,14 +2497,10 @@ impl GitRepository for RealGitRepository { } fn checkpoint(&self) -> BoxFuture<'static, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut git = GitBinary::new(git_binary_path, working_directory.clone(), executor) - .envs(checkpoint_author_envs()); + let mut git = git_binary?.envs(checkpoint_author_envs()); git.with_temp_index(async |git| { let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok(); let mut excludes = exclude_files(git).await?; @@ -2556,15 +2526,10 @@ impl GitRepository for RealGitRepository { } fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> { - let working_directory = 
self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "restore", "--source", @@ -2595,14 +2560,10 @@ impl GitRepository for RealGitRepository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let result = git .run(&[ "diff-tree", @@ -2633,14 +2594,10 @@ impl GitRepository for RealGitRepository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "diff", "--find-renames", @@ -2657,14 +2614,10 @@ impl GitRepository for RealGitRepository { &self, include_remote_name: bool, ) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, 
executor); + let git = git_binary?; let strip_prefix = if include_remote_name { "refs/remotes/" @@ -2714,22 +2667,23 @@ impl GitRepository for RealGitRepository { hook: RunHook, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let repository = self.repository.clone(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); let help_output = self.any_git_binary_help_output(); // Note: Do not spawn these commands on the background thread, as this causes some git hooks to hang. async move { - let working_directory = working_directory?; + let git_binary = git_binary?; + + let working_directory = git_binary.working_directory.clone(); if !help_output .await .lines() .any(|line| line.trim().starts_with("hook ")) { let hook_abs_path = repository.lock().path().join("hooks").join(hook.as_str()); - if hook_abs_path.is_file() { + if hook_abs_path.is_file() && git_binary.is_trusted { + #[allow(clippy::disallowed_methods)] let output = new_command(&hook_abs_path) .envs(env.iter()) .current_dir(&working_directory) @@ -2749,10 +2703,12 @@ impl GitRepository for RealGitRepository { return Ok(()); } - let git = GitBinary::new(git_binary_path, working_directory, executor) - .envs(HashMap::clone(&env)); - git.run(&["hook", "run", "--ignore-missing", hook.as_str()]) - .await?; + if git_binary.is_trusted { + let git_binary = git_binary.envs(HashMap::clone(&env)); + git_binary + .run(&["hook", "run", "--ignore-missing", hook.as_str()]) + .await?; + } Ok(()) } .boxed() @@ -2764,13 +2720,10 @@ impl GitRepository for RealGitRepository { log_order: LogOrder, request_tx: Sender>>, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); async move { - let working_directory = working_directory?; - let git = 
GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let mut command = git.build_command([ "log", @@ -2824,19 +2777,12 @@ impl GitRepository for RealGitRepository { } fn commit_data_reader(&self) -> Result { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self - .working_directory() - .map_err(|_| anyhow!("no working directory"))?; - let executor = self.executor.clone(); + let git_binary = self.git_binary()?; let (request_tx, request_rx) = smol::channel::bounded::(64); let task = self.executor.spawn(async move { - if let Err(error) = - run_commit_data_reader(git_binary_path, working_directory, executor, request_rx) - .await - { + if let Err(error) = run_commit_data_reader(git_binary, request_rx).await { log::error!("commit data reader failed: {error:?}"); } }); @@ -2846,15 +2792,21 @@ impl GitRepository for RealGitRepository { _task: task, }) } + + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) + } } async fn run_commit_data_reader( - git_binary_path: PathBuf, - working_directory: PathBuf, - executor: BackgroundExecutor, + git: GitBinary, request_rx: smol::channel::Receiver, ) -> Result<()> { - let git = GitBinary::new(git_binary_path, working_directory, executor); let mut process = git .build_command(["--no-optional-locks", "cat-file", "--batch"]) .stdin(Stdio::piped()) @@ -2977,11 +2929,6 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { OsString::from("--no-renames"), OsString::from("-z"), ]; - args.extend( - path_prefixes - .iter() - .map(|path_prefix| path_prefix.as_std_path().into()), - ); args.extend(path_prefixes.iter().map(|path_prefix| { if path_prefix.is_empty() { Path::new(".").into() @@ -3030,19 +2977,21 @@ async fn exclude_files(git: &GitBinary) -> Result { Ok(excludes) } -struct GitBinary { +pub(crate) struct 
GitBinary { git_binary_path: PathBuf, working_directory: PathBuf, executor: BackgroundExecutor, index_file_path: Option, envs: HashMap, + is_trusted: bool, } impl GitBinary { - fn new( + pub(crate) fn new( git_binary_path: PathBuf, working_directory: PathBuf, executor: BackgroundExecutor, + is_trusted: bool, ) -> Self { Self { git_binary_path, @@ -3050,6 +2999,7 @@ impl GitBinary { executor, index_file_path: None, envs: HashMap::default(), + is_trusted, } } @@ -3154,12 +3104,26 @@ impl GitBinary { Ok(String::from_utf8(output.stdout)?) } - fn build_command(&self, args: impl IntoIterator) -> util::command::Command + #[allow(clippy::disallowed_methods)] + pub(crate) fn build_command( + &self, + args: impl IntoIterator, + ) -> util::command::Command where S: AsRef, { let mut command = new_command(&self.git_binary_path); command.current_dir(&self.working_directory); + command.args(["-c", "core.fsmonitor=false"]); + command.arg("--no-pager"); + + if !self.is_trusted { + command.args(["-c", "core.hooksPath=/dev/null"]); + command.args(["-c", "core.sshCommand=ssh"]); + command.args(["-c", "credential.helper="]); + command.args(["-c", "protocol.ext.allow=never"]); + command.args(["-c", "diff.external="]); + } command.args(args); if let Some(index_file_path) = self.index_file_path.as_ref() { command.env("GIT_INDEX_FILE", index_file_path); @@ -3419,6 +3383,102 @@ mod tests { } } + #[gpui::test] + async fn test_build_command_untrusted_includes_both_safety_args(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["version"]) + .output() + .await + .expect("git version should succeed"); + assert!(output.status.success()); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["config", 
"--get", "core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled for untrusted repos" + ); + + git2::Repository::init(dir.path()).unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "/dev/null", + "hooksPath should be /dev/null for untrusted repos" + ); + } + + #[gpui::test] + async fn test_build_command_trusted_only_disables_fsmonitor(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + git2::Repository::init(dir.path()).unwrap(); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + true, + ); + let output = git + .build_command(["config", "--get", "core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled even for trusted repos" + ); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + true, + ); + let output = git + .build_command(["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + assert!( + !output.status.success(), + "hooksPath should NOT be overridden for trusted repos" + ); + } + #[gpui::test] async fn test_checkpoint_basic(cx: &mut TestAppContext) { disable_git_global_config(); @@ -4208,6 +4268,34 @@ mod tests { ); } + #[test] + fn test_original_repo_path_from_common_dir() { + // Normal repo: common_dir is /.git + assert_eq!( + 
original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") + ); + + // Worktree: common_dir is the main repo's .git + // (same result — that's the point, it always traces back to the original) + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") + ); + + // Bare repo: no .git suffix, returns as-is + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5.git")), + PathBuf::from("/code/zed5.git") + ); + + // Root-level .git directory + assert_eq!( + original_repo_path_from_common_dir(Path::new("/.git")), + PathBuf::from("/") + ); + } + #[test] fn test_validate_worktree_directory() { let work_dir = Path::new("/code/my-project"); @@ -4283,7 +4371,7 @@ mod tests { .spawn(async move { let git_binary_path = git_binary_path.clone(); let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = GitBinary::new(git_binary_path, working_directory, executor, true); git.run(&["gc", "--prune"]).await?; Ok(()) }) diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index 2cf7cc7c1810620f1cf1aaea831fb337810c83d8..e8b5caec505f7bf65cb4f5cd7d789207ccd8784f 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -475,7 +475,12 @@ impl FromStr for GitStatus { } .into(); } - _ => panic!("Unexpected duplicated status entries: {a_status:?} and {b_status:?}"), + (x, y) if x == y => {} + _ => { + log::warn!( + "Unexpected duplicated status entries: {a_status:?} and {b_status:?}" + ); + } } true }); @@ -575,14 +580,165 @@ impl FromStr for TreeDiff { } } +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +pub struct DiffStat { + pub added: u32, + pub deleted: u32, +} + +#[derive(Clone, Debug)] +pub struct GitDiffStat { + pub entries: Arc<[(RepoPath, DiffStat)]>, +} + +/// Parses the output of `git diff --numstat` where output looks like: +/// +/// ```text +/// 24 12 dir/file.txt +/// 
``` +pub fn parse_numstat(output: &str) -> GitDiffStat { + let mut entries = Vec::new(); + for line in output.lines() { + let line = line.trim(); + if line.is_empty() { + continue; + } + let mut parts = line.splitn(3, '\t'); + let (Some(added_str), Some(deleted_str), Some(path_str)) = + (parts.next(), parts.next(), parts.next()) + else { + continue; + }; + let Ok(added) = added_str.parse::() else { + continue; + }; + let Ok(deleted) = deleted_str.parse::() else { + continue; + }; + let Ok(path) = RepoPath::new(path_str) else { + continue; + }; + entries.push((path, DiffStat { added, deleted })); + } + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + entries.dedup_by(|(a, _), (b, _)| a == b); + + GitDiffStat { + entries: entries.into(), + } +} + #[cfg(test)] mod tests { use crate::{ repository::RepoPath, - status::{TreeDiff, TreeDiffStatus}, + status::{FileStatus, GitStatus, TreeDiff, TreeDiffStatus}, }; + use super::{DiffStat, parse_numstat}; + + fn lookup<'a>(entries: &'a [(RepoPath, DiffStat)], path: &str) -> Option<&'a DiffStat> { + let path = RepoPath::new(path).unwrap(); + entries.iter().find(|(p, _)| p == &path).map(|(_, s)| s) + } + + #[test] + fn test_parse_numstat_normal() { + let input = "10\t5\tsrc/main.rs\n3\t1\tREADME.md\n"; + let result = parse_numstat(input); + assert_eq!(result.entries.len(), 2); + assert_eq!( + lookup(&result.entries, "src/main.rs"), + Some(&DiffStat { + added: 10, + deleted: 5 + }) + ); + assert_eq!( + lookup(&result.entries, "README.md"), + Some(&DiffStat { + added: 3, + deleted: 1 + }) + ); + } + + #[test] + fn test_parse_numstat_binary_files_skipped() { + // git diff --numstat outputs "-\t-\tpath" for binary files + let input = "-\t-\timage.png\n5\t2\tsrc/lib.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.entries.len(), 1); + assert!(lookup(&result.entries, "image.png").is_none()); + assert_eq!( + lookup(&result.entries, "src/lib.rs"), + Some(&DiffStat { + added: 5, + deleted: 2 + }) + ); + } + + #[test] + fn 
test_parse_numstat_empty_input() { + assert!(parse_numstat("").entries.is_empty()); + assert!(parse_numstat("\n\n").entries.is_empty()); + assert!(parse_numstat(" \n \n").entries.is_empty()); + } + + #[test] + fn test_parse_numstat_malformed_lines_skipped() { + let input = "not_a_number\t5\tfile.rs\n10\t5\tvalid.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.entries.len(), 1); + assert_eq!( + lookup(&result.entries, "valid.rs"), + Some(&DiffStat { + added: 10, + deleted: 5 + }) + ); + } + + #[test] + fn test_parse_numstat_incomplete_lines_skipped() { + // Lines with fewer than 3 tab-separated fields are skipped + let input = "10\t5\n7\t3\tok.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.entries.len(), 1); + assert_eq!( + lookup(&result.entries, "ok.rs"), + Some(&DiffStat { + added: 7, + deleted: 3 + }) + ); + } + + #[test] + fn test_parse_numstat_zero_stats() { + let input = "0\t0\tunchanged_but_present.rs\n"; + let result = parse_numstat(input); + assert_eq!( + lookup(&result.entries, "unchanged_but_present.rs"), + Some(&DiffStat { + added: 0, + deleted: 0 + }) + ); + } + + #[test] + fn test_duplicate_untracked_entries() { + // Regression test for ZED-2XA: git can produce duplicate untracked entries + // for the same path. This should deduplicate them instead of panicking. + let input = "?? file.txt\0?? 
file.txt"; + let status: GitStatus = input.parse().unwrap(); + assert_eq!(status.entries.len(), 1); + assert_eq!(status.entries[0].1, FileStatus::Untracked); + } + #[test] fn test_tree_diff_parsing() { let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() + diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 37f170ada5ecd23daf5ee58ee1011af95bfc6b8d..90ccf94f5f91720972a52d85bc506d12c1a528cb 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1,5 +1,5 @@ use collections::{BTreeMap, HashMap}; -use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote, parse_git_remote_url, @@ -18,7 +18,10 @@ use language::line_diff; use menu::{Cancel, SelectNext, SelectPrevious}; use project::{ Project, - git_store::{CommitDataState, GitStoreEvent, Repository, RepositoryEvent, RepositoryId}, + git_store::{ + CommitDataState, GitGraphEvent, GitStoreEvent, GraphDataResponse, Repository, + RepositoryEvent, RepositoryId, + }, }; use settings::Settings; use smallvec::{SmallVec, smallvec}; @@ -39,7 +42,7 @@ use ui::{ }; use workspace::{ Workspace, - item::{Item, ItemEvent, SerializableItem}, + item::{Item, ItemEvent, SerializableItem, TabTooltipContent}, }; const COMMIT_CIRCLE_RADIUS: Pixels = px(3.5); @@ -246,12 +249,6 @@ actions!( ] ); -pub struct GitGraphFeatureFlag; - -impl FeatureFlag for GitGraphFeatureFlag { - const NAME: &'static str = "git-graph"; -} - fn timestamp_format() -> &'static [BorrowedFormatItem<'static>] { static FORMAT: OnceLock>> = OnceLock::new(); FORMAT.get_or_init(|| { @@ -710,29 +707,66 @@ pub fn init(cx: &mut App) { |div| { let workspace = workspace.weak_handle(); - div.on_action(move |_: &git_ui::git_panel::Open, window, cx| { - 
workspace - .update(cx, |workspace, cx| { - let existing = workspace.items_of_type::(cx).next(); - if let Some(existing) = existing { - workspace.activate_item(&existing, true, true, window, cx); - return; - } + div.on_action({ + let workspace = workspace.clone(); + move |_: &git_ui::git_panel::Open, window, cx| { + workspace + .update(cx, |workspace, cx| { + let existing = workspace.items_of_type::(cx).next(); + if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + return; + } - let project = workspace.project().clone(); - let workspace_handle = workspace.weak_handle(); - let git_graph = cx - .new(|cx| GitGraph::new(project, workspace_handle, window, cx)); - workspace.add_item_to_active_pane( - Box::new(git_graph), - None, - true, - window, - cx, - ); - }) - .ok(); + let project = workspace.project().clone(); + let workspace_handle = workspace.weak_handle(); + let git_graph = cx.new(|cx| { + GitGraph::new(project, workspace_handle, window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(git_graph), + None, + true, + window, + cx, + ); + }) + .ok(); + } }) + .on_action( + move |action: &git_ui::git_panel::OpenAtCommit, window, cx| { + let sha = action.sha.clone(); + workspace + .update(cx, |workspace, cx| { + let existing = workspace.items_of_type::(cx).next(); + if let Some(existing) = existing { + existing.update(cx, |graph, cx| { + graph.select_commit_by_sha(&sha, cx); + }); + workspace.activate_item(&existing, true, true, window, cx); + return; + } + + let project = workspace.project().clone(); + let workspace_handle = workspace.weak_handle(); + let git_graph = cx.new(|cx| { + let mut graph = + GitGraph::new(project, workspace_handle, window, cx); + graph.select_commit_by_sha(&sha, cx); + graph + }); + workspace.add_item_to_active_pane( + Box::new(git_graph), + None, + true, + window, + cx, + ); + }) + .ok(); + }, + ) }, ) }); @@ -821,6 +855,7 @@ pub struct GitGraph { commit_details_split_state: Entity, 
selected_repo_id: Option, changed_files_scroll_handle: UniformListScrollHandle, + pending_select_sha: Option, } impl GitGraph { @@ -918,6 +953,7 @@ impl GitGraph { commit_details_split_state: cx.new(|_cx| SplitState::new()), selected_repo_id: active_repository, changed_files_scroll_handle: UniformListScrollHandle::new(), + pending_select_sha: None, }; this.fetch_initial_graph_data(cx); @@ -931,21 +967,65 @@ impl GitGraph { cx: &mut Context, ) { match event { - RepositoryEvent::GitGraphCountUpdated((order, source), commit_count) => { - if order != &self.log_order || source != &self.log_source { - return; - } + RepositoryEvent::GraphEvent((source, order), event) + if source == &self.log_source && order == &self.log_order => + { + match event { + GitGraphEvent::FullyLoaded => { + if let Some(pending_sha_index) = + self.pending_select_sha.take().and_then(|oid| { + repository + .read(cx) + .get_graph_data(source.clone(), *order) + .and_then(|data| data.commit_oid_to_index.get(&oid).copied()) + }) + { + self.select_entry(pending_sha_index, cx); + } + } + GitGraphEvent::LoadingError => { + // todo(git_graph): Wire this up with the UI + } + GitGraphEvent::CountUpdated(commit_count) => { + let old_count = self.graph_data.commits.len(); + + if let Some(pending_selection_index) = + repository.update(cx, |repository, cx| { + let GraphDataResponse { + commits, + is_loading, + error: _, + } = repository.graph_data( + source.clone(), + *order, + old_count..*commit_count, + cx, + ); + self.graph_data.add_commits(commits); - let old_count = self.graph_data.commits.len(); + let pending_sha_index = self.pending_select_sha.and_then(|oid| { + repository.get_graph_data(source.clone(), *order).and_then( + |data| data.commit_oid_to_index.get(&oid).copied(), + ) + }); - repository.update(cx, |repository, cx| { - let (commits, _) = - repository.graph_data(source.clone(), *order, old_count..*commit_count, cx); - self.graph_data.add_commits(commits); - }); - cx.notify(); + if !is_loading && 
pending_sha_index.is_none() { + self.pending_select_sha.take(); + } + + pending_sha_index + }) + { + self.select_entry(pending_selection_index, cx); + self.pending_select_sha.take(); + } + + cx.notify(); + } + } } - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::BranchChanged => { + self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state // NOTE: this fixes an loading performance regression @@ -954,6 +1034,7 @@ impl GitGraph { cx.notify(); } } + RepositoryEvent::GraphEvent(_, _) => {} _ => {} } } @@ -961,12 +1042,9 @@ impl GitGraph { fn fetch_initial_graph_data(&mut self, cx: &mut App) { if let Some(repository) = self.get_selected_repository(cx) { repository.update(cx, |repository, cx| { - let (commits, _) = repository.graph_data( - self.log_source.clone(), - self.log_order, - 0..usize::MAX, - cx, - ); + let commits = repository + .graph_data(self.log_source.clone(), self.log_order, 0..usize::MAX, cx) + .commits; self.graph_data.add_commits(commits); }); } @@ -1109,6 +1187,10 @@ impl GitGraph { } } + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + self.open_selected_commit_view(window, cx); + } + fn select_entry(&mut self, idx: usize, cx: &mut Context) { if self.selected_entry_idx == Some(idx) { return; @@ -1153,6 +1235,27 @@ impl GitGraph { cx.notify(); } + pub fn select_commit_by_sha(&mut self, sha: &str, cx: &mut Context) { + let Ok(oid) = sha.parse::() else { + return; + }; + + let Some(selected_repository) = self.get_selected_repository(cx) else { + return; + }; + + let Some(index) = selected_repository + .read(cx) + .get_graph_data(self.log_source.clone(), self.log_order) + .and_then(|data| data.commit_oid_to_index.get(&oid)) + .copied() + else { + return; + }; + + self.select_entry(index, cx); + } + fn open_selected_commit_view(&mut self, window: &mut Window, cx: &mut Context) { let 
Some(selected_entry_index) = self.selected_entry_idx else { return; @@ -1966,7 +2069,11 @@ impl Render for GitGraph { if let Some(repository) = self.get_selected_repository(cx) { repository.update(cx, |repository, cx| { // Start loading the graph data if we haven't started already - let (commits, is_loading) = repository.graph_data( + let GraphDataResponse { + commits, + is_loading, + error: _, + } = repository.graph_data( self.log_source.clone(), self.log_order, 0..usize::MAX, @@ -2145,16 +2252,17 @@ impl Render for GitGraph { }; div() - .size_full() - .bg(cx.theme().colors().editor_background) .key_context("GitGraph") .track_focus(&self.focus_handle) + .size_full() + .bg(cx.theme().colors().editor_background) .on_action(cx.listener(|this, _: &OpenCommitView, window, cx| { this.open_selected_commit_view(window, cx); })) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::confirm)) .child(content) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( @@ -2179,8 +2287,39 @@ impl Focusable for GitGraph { impl Item for GitGraph { type Event = ItemEvent; - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Git Graph".into() + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(Icon::new(IconName::GitGraph)) + } + + fn tab_tooltip_content(&self, cx: &App) -> Option { + let repo_name = self.get_selected_repository(cx).and_then(|repo| { + repo.read(cx) + .work_directory_abs_path + .file_name() + .map(|name| name.to_string_lossy().to_string()) + }); + + Some(TabTooltipContent::Custom(Box::new(Tooltip::element({ + move |_, _| { + v_flex() + .child(Label::new("Git Graph")) + .when_some(repo_name.clone(), |this, name| { + this.child(Label::new(name).color(Color::Muted).size(LabelSize::Small)) + }) + .into_any_element() + } + })))) + } + + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + 
self.get_selected_repository(cx) + .and_then(|repo| { + repo.read(cx) + .work_directory_abs_path + .file_name() + .map(|name| name.to_string_lossy().to_string()) + }) + .map_or_else(|| "Git Graph".into(), |name| SharedString::from(name)) } fn show_toolbar(&self) -> bool { @@ -2958,7 +3097,7 @@ mod tests { 0..usize::MAX, cx, ) - .0 + .commits .to_vec() }); @@ -3035,19 +3174,10 @@ mod tests { .any(|event| matches!(event, RepositoryEvent::BranchChanged)), "initial repository scan should emit BranchChanged" ); - assert!( - observed_repository_events - .iter() - .any(|event| matches!(event, RepositoryEvent::MergeHeadsChanged)), - "initial repository scan should emit MergeHeadsChanged" - ); - - let graph_data_key = (crate::LogOrder::default(), crate::LogSource::default()); let commit_count_after = repository.read_with(cx, |repo, _| { - repo.initial_graph_data - .get(&graph_data_key) - .map(|(_, data)| data.len()) - .unwrap_or(0) + repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) + .map(|data| data.commit_data.len()) + .unwrap() }); assert_eq!( commits.len(), diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index f779570be471fd1a097e350d59ef2fb1d4003d2b..a25911d65eb87d176a0a987d996e159e2c43628c 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -27,6 +27,7 @@ component.workspace = true db.workspace = true editor.workspace = true futures.workspace = true +feature_flags.workspace = true fuzzy.workspace = true git.workspace = true gpui.workspace = true @@ -43,6 +44,7 @@ panel.workspace = true picker.workspace = true project.workspace = true prompt_store.workspace = true +proto.workspace = true remote_connection.workspace = true remote.workspace = true schemars.workspace = true diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 08290cb88a273d1f3f17da5c08a5b4a402aa74cd..d1ab60b9042fb06a3f049625f7c0a809957a1543 100644 --- a/crates/git_ui/src/branch_picker.rs +++ 
b/crates/git_ui/src/branch_picker.rs @@ -1390,7 +1390,9 @@ mod tests { (branch_list, cx) } - async fn init_fake_repository(cx: &mut TestAppContext) -> Entity { + async fn init_fake_repository( + cx: &mut TestAppContext, + ) -> (Entity, Entity) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/dir"), @@ -1413,7 +1415,7 @@ mod tests { let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let repository = cx.read(|cx| project.read(cx).active_repository(cx)); - repository.unwrap() + (project, repository.unwrap()) } #[gpui::test] @@ -1476,7 +1478,7 @@ mod tests { #[gpui::test] async fn test_delete_branch(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = create_test_branches(); @@ -1534,7 +1536,7 @@ mod tests { #[gpui::test] async fn test_delete_remote(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![ create_test_branch("main", true, Some("origin"), Some(1000)), create_test_branch("feature-auth", false, Some("origin"), Some(900)), @@ -1721,7 +1723,7 @@ mod tests { const NEW_BRANCH: &str = "new-feature-branch"; init_test(test_cx); - let repository = init_fake_repository(test_cx).await; + let (_project, repository) = init_fake_repository(test_cx).await; let branches = vec![ create_test_branch(MAIN_BRANCH, true, None, Some(1000)), @@ -1785,7 +1787,7 @@ mod tests { #[gpui::test] async fn test_remote_url_detection_https(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![create_test_branch("main", true, None, Some(1000))]; let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await; diff --git a/crates/git_ui/src/commit_view.rs 
b/crates/git_ui/src/commit_view.rs index f5ed23a6a84e7649ddf7f1e7b6b3651a323ee3c6..8f2a019fddf0513c100a53956c81012d11c2ca30 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -3,6 +3,7 @@ use buffer_diff::BufferDiff; use collections::HashMap; use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; +use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content}; use git::status::{FileStatus, StatusCode, TrackedStatus}; use git::{ @@ -27,7 +28,7 @@ use std::{ sync::Arc, }; use theme::ActiveTheme; -use ui::{ButtonLike, DiffStat, Tooltip, prelude::*}; +use ui::{DiffStat, Divider, Tooltip, prelude::*}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; use workspace::item::TabTooltipContent; use workspace::{ @@ -450,6 +451,7 @@ impl CommitView { fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let commit = &self.commit; let author_name = commit.author_name.clone(); + let author_email = commit.author_email.clone(); let commit_sha = commit.sha.clone(); let commit_date = time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp) .unwrap_or_else(|_| time::OffsetDateTime::now_utc()); @@ -461,36 +463,6 @@ impl CommitView { time_format::TimestampFormat::MediumAbsolute, ); - let remote_info = self - .remote - .as_ref() - .filter(|_| self.stash.is_none()) - .map(|remote| { - let provider = remote.host.name(); - let parsed_remote = ParsedGitRemote { - owner: remote.owner.as_ref().into(), - repo: remote.repo.as_ref().into(), - }; - let params = BuildCommitPermalinkParams { sha: &commit.sha }; - let url = remote - .host - .build_commit_permalink(&parsed_remote, params) - .to_string(); - (provider, url) - }); - - let (additions, deletions) = self.calculate_changed_lines(cx); - - let 
commit_diff_stat = if additions > 0 || deletions > 0 { - Some(DiffStat::new( - "commit-diff-stat", - additions as usize, - deletions as usize, - )) - } else { - None - }; - let gutter_width = self.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); let style = editor.style(cx); @@ -501,116 +473,75 @@ impl CommitView { .full_width() }); - let clipboard_has_link = cx + let clipboard_has_sha = cx .read_from_clipboard() .and_then(|entry| entry.text()) .map_or(false, |clipboard_text| { clipboard_text.trim() == commit_sha.as_ref() }); - let (copy_icon, copy_icon_color) = if clipboard_has_link { + let (copy_icon, copy_icon_color) = if clipboard_has_sha { (IconName::Check, Color::Success) } else { (IconName::Copy, Color::Muted) }; h_flex() + .py_2() + .pr_2p5() + .w_full() + .justify_between() .border_b_1() .border_color(cx.theme().colors().border_variant) - .w_full() - .child( - h_flex() - .w(gutter_width) - .justify_center() - .child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)), - ) .child( h_flex() - .py_4() - .pl_1() - .pr_4() - .w_full() - .items_start() - .justify_between() - .flex_wrap() + .child(h_flex().w(gutter_width).justify_center().child( + self.render_commit_avatar(&commit.sha, rems_from_px(40.), window, cx), + )) .child( - v_flex() - .child( - h_flex() - .gap_1() - .child(Label::new(author_name).color(Color::Default)) - .child({ - ButtonLike::new("sha") - .child( - h_flex() - .group("sha_btn") - .size_full() - .max_w_32() - .gap_0p5() - .child( - Label::new(commit_sha.clone()) - .color(Color::Muted) - .size(LabelSize::Small) - .truncate() - .buffer_font(cx), - ) - .child( - div().visible_on_hover("sha_btn").child( - Icon::new(copy_icon) - .color(copy_icon_color) - .size(IconSize::Small), - ), - ), - ) - .tooltip({ - let commit_sha = commit_sha.clone(); - move |_, cx| { - Tooltip::with_meta( - "Copy Commit SHA", - None, - commit_sha.clone(), - cx, - ) - } - }) - .on_click(move |_, _, cx| { - 
cx.stop_propagation(); - cx.write_to_clipboard(ClipboardItem::new_string( - commit_sha.to_string(), - )); - }) - }), - ) - .child( - h_flex() - .gap_1p5() - .child( - Label::new(date_string) - .color(Color::Muted) - .size(LabelSize::Small), - ) - .child( - Label::new("•") - .color(Color::Ignored) - .size(LabelSize::Small), - ) - .children(commit_diff_stat), - ), - ) - .children(remote_info.map(|(provider_name, url)| { - let icon = match provider_name.as_str() { - "GitHub" => IconName::Github, - _ => IconName::Link, - }; - - Button::new("view_on_provider", format!("View on {}", provider_name)) - .icon(icon) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .on_click(move |_, _, cx| cx.open_url(&url)) - })), + v_flex().child(Label::new(author_name)).child( + h_flex() + .gap_1p5() + .child( + Label::new(date_string) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child( + Label::new("•") + .size(LabelSize::Small) + .color(Color::Muted) + .alpha(0.5), + ) + .child( + Label::new(author_email) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ), ) + .when(self.stash.is_none(), |this| { + this.child( + Button::new("sha", "Commit SHA") + .icon(copy_icon) + .icon_color(copy_icon_color) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .tooltip({ + let commit_sha = commit_sha.clone(); + move |_, cx| { + Tooltip::with_meta("Copy Commit SHA", None, commit_sha.clone(), cx) + } + }) + .on_click(move |_, _, cx| { + cx.stop_propagation(); + cx.write_to_clipboard(ClipboardItem::new_string( + commit_sha.to_string(), + )); + }), + ) + }) } fn apply_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) { @@ -898,7 +829,7 @@ impl Item for CommitView { type Event = EditorEvent; fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { - Some(Icon::new(IconName::GitBranch).color(Color::Muted)) + Some(Icon::new(IconName::GitCommit).color(Color::Muted)) } fn tab_content(&self, params: 
TabContentParams, _window: &Window, cx: &App) -> AnyElement { @@ -1081,8 +1012,93 @@ impl CommitViewToolbar { impl EventEmitter for CommitViewToolbar {} impl Render for CommitViewToolbar { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - div().hidden() + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let Some(commit_view) = self.commit_view.as_ref().and_then(|w| w.upgrade()) else { + return div(); + }; + + let commit_view_ref = commit_view.read(cx); + let is_stash = commit_view_ref.stash.is_some(); + + let (additions, deletions) = commit_view_ref.calculate_changed_lines(cx); + + let commit_sha = commit_view_ref.commit.sha.clone(); + + let remote_info = commit_view_ref.remote.as_ref().map(|remote| { + let provider = remote.host.name(); + let parsed_remote = ParsedGitRemote { + owner: remote.owner.as_ref().into(), + repo: remote.repo.as_ref().into(), + }; + let params = BuildCommitPermalinkParams { sha: &commit_sha }; + let url = remote + .host + .build_commit_permalink(&parsed_remote, params) + .to_string(); + (provider, url) + }); + + let sha_for_graph = commit_sha.to_string(); + + h_flex() + .gap_1() + .when(additions > 0 || deletions > 0, |this| { + this.child( + h_flex() + .gap_2() + .child(DiffStat::new( + "toolbar-diff-stat", + additions as usize, + deletions as usize, + )) + .child(Divider::vertical()), + ) + }) + .child( + IconButton::new("buffer-search", IconName::MagnifyingGlass) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action( + "Buffer Search", + &zed_actions::buffer_search::Deploy::find(), + cx, + ) + }) + .on_click(|_, window, cx| { + window.dispatch_action( + Box::new(zed_actions::buffer_search::Deploy::find()), + cx, + ); + }), + ) + .when(!is_stash, |this| { + this.when(cx.has_flag::(), |this| { + this.child( + IconButton::new("show-in-git-graph", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Show in Git Graph")) + 
.on_click(move |_, window, cx| { + window.dispatch_action( + Box::new(crate::git_panel::OpenAtCommit { + sha: sha_for_graph.clone(), + }), + cx, + ); + }), + ) + }) + .children(remote_info.map(|(provider_name, url)| { + let icon = match provider_name.as_str() { + "GitHub" => IconName::Github, + _ => IconName::Link, + }; + + IconButton::new("view_on_provider", icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text(format!("View on {}", provider_name))) + .on_click(move |_, _, cx| cx.open_url(&url)) + })) + }) } } @@ -1093,12 +1109,11 @@ impl ToolbarItemView for CommitViewToolbar { _: &mut Window, cx: &mut Context, ) -> ToolbarItemLocation { - if let Some(entity) = active_pane_item.and_then(|i| i.act_as::(cx)) - && entity.read(cx).stash.is_some() - { + if let Some(entity) = active_pane_item.and_then(|i| i.act_as::(cx)) { self.commit_view = Some(entity.downgrade()); return ToolbarItemLocation::PrimaryRight; } + self.commit_view = None; ToolbarItemLocation::Hidden } diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 838ec886fdb400b67fa284df9182bab9766548bd..82571b541e692141f843a4c3ef6e082c72e55e48 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -290,7 +290,7 @@ fn conflicts_updated( blocks.push(BlockProperties { placement: BlockPlacement::Above(anchor), height: Some(1), - style: BlockStyle::Fixed, + style: BlockStyle::Sticky, render: Arc::new({ let conflict = conflict.clone(); move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index b8caf478305609b7ea95874333f1483c448ac242..61d94b68a118525bd9b67217a929ce7462696dc7 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -28,7 +28,7 @@ use git::repository::{ UpstreamTrackingStatus, get_git_committer, }; use git::stash::GitStash; -use git::status::StageStatus; +use git::status::{DiffStat, 
StageStatus}; use git::{Amend, Signoff, ToggleStaged, repository::RepoPath, status::FileStatus}; use git::{ ExpandCommitEditor, GitHostingProviderRegistry, RestoreTrackedFiles, StageAll, StashAll, @@ -55,6 +55,7 @@ use project::{ project_settings::{GitPathStyle, ProjectSettings}, }; use prompt_store::{BuiltInPrompt, PromptId, PromptStore, RULES_FILE_NAMES}; +use proto::RpcError; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore, StatusStyle}; use smallvec::SmallVec; @@ -123,6 +124,13 @@ actions!( ] ); +/// Opens the Git Graph Tab at a specific commit. +#[derive(Clone, PartialEq, serde::Deserialize, schemars::JsonSchema, gpui::Action)] +#[action(namespace = git_graph)] +pub struct OpenAtCommit { + pub sha: String, +} + fn prompt( msg: &str, detail: Option<&str>, @@ -524,6 +532,7 @@ pub struct GitStatusEntry { pub(crate) repo_path: RepoPath, pub(crate) status: FileStatus, pub(crate) staging: StageStatus, + pub(crate) diff_stat: Option, } impl GitStatusEntry { @@ -644,6 +653,7 @@ pub struct GitPanel { local_committer_task: Option>, bulk_staging: Option, stash_entries: GitStash, + _settings_subscription: Subscription, } @@ -704,18 +714,26 @@ impl GitPanel { let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let mut was_tree_view = GitPanelSettings::get_global(cx).tree_view; + let mut was_diff_stats = GitPanelSettings::get_global(cx).diff_stats; cx.observe_global_in::(window, move |this, window, cx| { let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let tree_view = GitPanelSettings::get_global(cx).tree_view; + let diff_stats = GitPanelSettings::get_global(cx).diff_stats; if tree_view != was_tree_view { this.view_mode = GitPanelViewMode::from_settings(cx); } + + let mut update_entries = false; if sort_by_path != was_sort_by_path || tree_view != was_tree_view { this.bulk_staging.take(); + update_entries = true; + } + if (diff_stats != was_diff_stats) || update_entries { this.update_visible_entries(window, 
cx); } was_sort_by_path = sort_by_path; was_tree_view = tree_view; + was_diff_stats = diff_stats; }) .detach(); @@ -747,9 +765,7 @@ impl GitPanel { move |this, _git_store, event, window, cx| match event { GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged - | RepositoryEvent::BranchChanged - | RepositoryEvent::MergeHeadsChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, true, ) | GitStoreEvent::RepositoryAdded @@ -2735,6 +2751,7 @@ impl GitPanel { temperature, thinking_allowed: false, thinking_effort: None, + speed: None, }; let stream = model.stream_completion_text(request, cx); @@ -3171,18 +3188,16 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) -> AskPassDelegate { - let this = cx.weak_entity(); + let workspace = self.workspace.clone(); let operation = operation.into(); let window = window.window_handle(); AskPassDelegate::new(&mut cx.to_async(), move |prompt, tx, cx| { window .update(cx, |_, window, cx| { - this.update(cx, |this, cx| { - this.workspace.update(cx, |workspace, cx| { - workspace.toggle_modal(window, cx, |window, cx| { - AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx) - }); - }) + workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(window, cx, |window, cx| { + AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx) + }); }) }) .ok(); @@ -3526,6 +3541,7 @@ impl GitPanel { repo_path: entry.repo_path.clone(), status: entry.status, staging, + diff_stat: entry.diff_stat, }; if staging.has_staged() { @@ -3562,6 +3578,7 @@ impl GitPanel { repo_path: ops.repo_path.clone(), status: status.status, staging: StageStatus::Staged, + diff_stat: status.diff_stat, }); } } @@ -5108,6 +5125,8 @@ impl GitPanel { } }); + let id_for_diff_stat = id.clone(); + h_flex() .id(id) .h(self.list_item_height()) @@ -5124,6 +5143,16 @@ impl GitPanel { .hover(|s| s.bg(hover_bg)) .active(|s| s.bg(active_bg)) .child(name_row) + .when(GitPanelSettings::get_global(cx).diff_stats, 
|el| { + el.when_some(entry.diff_stat, move |this, stat| { + let id = format!("diff-stat-{}", id_for_diff_stat); + this.child(ui::DiffStat::new( + id, + stat.added as usize, + stat.deleted as usize, + )) + }) + }) .child( div() .id(checkbox_wrapper_id) @@ -5514,6 +5543,21 @@ impl GitPanel { } } +#[cfg(any(test, feature = "test-support"))] +impl GitPanel { + pub fn new_test( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + Self::new(workspace, window, cx) + } + + pub fn active_repository(&self) -> Option<&Entity> { + self.active_repository.as_ref() + } +} + impl Render for GitPanel { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let project = self.project.read(cx); @@ -6306,7 +6350,7 @@ pub(crate) fn show_error_toast( cx: &mut App, ) { let action = action.into(); - let message = e.to_string().trim().to_string(); + let message = format_git_error_toast_message(&e); if message .matches(git::repository::REMOTE_CANCELLED_BY_USER) .next() @@ -6332,6 +6376,20 @@ pub(crate) fn show_error_toast( } } +fn rpc_error_raw_message_from_chain(error: &anyhow::Error) -> Option<&str> { + error + .chain() + .find_map(|cause| cause.downcast_ref::().map(RpcError::raw_message)) +} + +fn format_git_error_toast_message(error: &anyhow::Error) -> String { + if let Some(message) = rpc_error_raw_message_from_chain(error) { + message.trim().to_string() + } else { + error.to_string().trim().to_string() + } +} + #[cfg(test)] mod tests { use git::{ @@ -6363,6 +6421,47 @@ mod tests { }); } + #[test] + fn test_format_git_error_toast_message_prefers_raw_rpc_message() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + + let message = format_git_error_toast_message(&rpc_error); + assert_eq!( + message, + "Your local changes 
to the following files would be overwritten by merge" + ); + } + + #[test] + fn test_format_git_error_toast_message_prefers_raw_rpc_message_when_wrapped() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + let wrapped = rpc_error.context("sending pull request"); + + let message = format_git_error_toast_message(&wrapped); + assert_eq!( + message, + "Your local changes to the following files would be overwritten by merge" + ); + } + #[gpui::test] async fn test_entry_worktree_paths(cx: &mut TestAppContext) { init_test(cx); @@ -6436,11 +6535,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); @@ -6461,11 +6568,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index 6b5334e55544b465864fe3afb780c4673bb5961e..2a7480de355a6190494211d823e4aa440d191371 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -25,6 +25,7 @@ pub struct GitPanelSettings { pub sort_by_path: bool, pub 
collapse_untracked_diff: bool, pub tree_view: bool, + pub diff_stats: bool, } impl ScrollbarVisibility for GitPanelSettings { @@ -58,6 +59,7 @@ impl Settings for GitPanelSettings { sort_by_path: git_panel.sort_by_path.unwrap(), collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(), tree_view: git_panel.tree_view.unwrap(), + diff_stats: git_panel.diff_stats.unwrap(), } } } diff --git a/crates/git_ui/src/git_picker.rs b/crates/git_ui/src/git_picker.rs index 82ef9c9516b7c145edbf26d6c5b8927189525cab..6cf82327b43abe6c3784e4ec8ca3d16161edfda7 100644 --- a/crates/git_ui/src/git_picker.rs +++ b/crates/git_ui/src/git_picker.rs @@ -15,7 +15,7 @@ use workspace::{ModalView, Workspace, pane}; use crate::branch_picker::{self, BranchList, DeleteBranch, FilterRemotes}; use crate::stash_picker::{self, DropStashItem, ShowStashItem, StashList}; use crate::worktree_picker::{ - self, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList, + self, DeleteWorktree, WorktreeFromDefault, WorktreeFromDefaultOnWindow, WorktreeList, }; actions!( @@ -408,6 +408,19 @@ impl GitPicker { } } + fn handle_worktree_delete( + &mut self, + _: &DeleteWorktree, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(worktree_list) = &self.worktree_list { + worktree_list.update(cx, |list, cx| { + list.handle_delete(&DeleteWorktree, window, cx); + }); + } + } + fn handle_drop_stash( &mut self, _: &DropStashItem, @@ -524,6 +537,7 @@ impl Render for GitPicker { .when(self.tab == GitPickerTab::Worktrees, |el| { el.on_action(cx.listener(Self::handle_worktree_from_default)) .on_action(cx.listener(Self::handle_worktree_from_default_on_window)) + .on_action(cx.listener(Self::handle_worktree_delete)) }) .when(self.tab == GitPickerTab::Stash, |el| { el.on_action(cx.listener(Self::handle_drop_stash)) diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 43dd32a24ef0fa6df1f5797742ed65e57ee368a3..1419fa049ee2aae1992dac517aad8371800ac532 100644 --- 
a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -145,11 +145,7 @@ impl TextDiffView { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); - multibuffer.push_excerpts( - source_buffer.clone(), - [editor::ExcerptRange::new(source_range)], - cx, - ); + multibuffer.set_excerpts_for_buffer(source_buffer.clone(), [source_range], 0, cx); multibuffer.add_diff(diff_buffer.clone(), cx); multibuffer diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index f2826a2b543a73c5341653c42bbb5f1540213b2a..6c35e7c99ffb8f6efa1a2bd7a07c2ded8d158668 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -22,7 +22,16 @@ use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, prelude::*}; use util::ResultExt; use workspace::{ModalView, MultiWorkspace, Workspace, notifications::DetachAndPromptErr}; -actions!(git, [WorktreeFromDefault, WorktreeFromDefaultOnWindow]); +use crate::git_panel::show_error_toast; + +actions!( + git, + [ + WorktreeFromDefault, + WorktreeFromDefaultOnWindow, + DeleteWorktree + ] +); pub fn open( workspace: &mut Workspace, @@ -181,6 +190,19 @@ impl WorktreeList { ); }) } + + pub fn handle_delete( + &mut self, + _: &DeleteWorktree, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + picker + .delegate + .delete_at(picker.delegate.selected_index, window, cx) + }) + } } impl ModalView for WorktreeList {} impl EventEmitter for WorktreeList {} @@ -203,6 +225,9 @@ impl Render for WorktreeList { .on_action(cx.listener(|this, _: &WorktreeFromDefaultOnWindow, w, cx| { this.handle_new_worktree(true, w, cx) })) + .on_action(cx.listener(|this, _: &DeleteWorktree, window, cx| { + this.handle_delete(&DeleteWorktree, window, cx) + })) .child(self.picker.clone()) .when(!self.embedded, |el| { el.on_mouse_down_out({ @@ -275,9 +300,9 @@ impl WorktreeListDelegate { .git 
.worktree_directory .clone(); - let work_dir = repo.work_directory_abs_path.clone(); + let original_repo = repo.original_repo_abs_path.clone(); let directory = - validate_worktree_directory(&work_dir, &worktree_directory_setting)?; + validate_worktree_directory(&original_repo, &worktree_directory_setting)?; let new_worktree_path = directory.join(&branch); let receiver = repo.create_worktree(branch.clone(), directory, commit); anyhow::Ok((receiver, new_worktree_path)) @@ -420,6 +445,57 @@ impl WorktreeListDelegate { .as_ref() .and_then(|repo| repo.read(cx).branch.as_ref().map(|b| b.name())) } + + fn delete_at(&self, idx: usize, window: &mut Window, cx: &mut Context>) { + let Some(entry) = self.matches.get(idx).cloned() else { + return; + }; + if entry.is_new { + return; + } + let Some(repo) = self.repo.clone() else { + return; + }; + let workspace = self.workspace.clone(); + let path = entry.worktree.path; + + cx.spawn_in(window, async move |picker, cx| { + let result = repo + .update(cx, |repo, _| repo.remove_worktree(path.clone(), false)) + .await?; + + if let Err(e) = result { + log::error!("Failed to remove worktree: {}", e); + if let Some(workspace) = workspace.upgrade() { + cx.update(|_window, cx| { + show_error_toast( + workspace, + format!("worktree remove {}", path.display()), + e, + cx, + ) + })?; + } + return Ok(()); + } + + picker.update_in(cx, |picker, _, cx| { + picker.delegate.matches.retain(|e| e.worktree.path != path); + if let Some(all_worktrees) = &mut picker.delegate.all_worktrees { + all_worktrees.retain(|w| w.path != path); + } + if picker.delegate.matches.is_empty() { + picker.delegate.selected_index = 0; + } else if picker.delegate.selected_index >= picker.delegate.matches.len() { + picker.delegate.selected_index = picker.delegate.matches.len() - 1; + } + cx.notify(); + })?; + + anyhow::Ok(()) + }) + .detach(); + } } async fn open_remote_worktree( @@ -778,6 +854,16 @@ impl PickerDelegate for WorktreeListDelegate { } else { Some( 
footer_container + .child( + Button::new("delete-worktree", "Delete") + .key_binding( + KeyBinding::for_action_in(&DeleteWorktree, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(|_, window, cx| { + window.dispatch_action(DeleteWorktree.boxed_clone(), cx) + }), + ) .child( Button::new("open-in-new-window", "Open in New Window") .key_binding( diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index 3a686f97a8825b30a8f02f4149b110c3d1aacb1e..7659be8ab44da35efd16389c4abd0bf99d8cf3a4 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -510,11 +510,9 @@ pub enum Model { alias = "gemini-2.5-pro-preview-06-05" )] Gemini25Pro, - #[serde(rename = "gemini-3-pro-preview")] - Gemini3Pro, #[serde(rename = "gemini-3-flash-preview")] Gemini3Flash, - #[serde(rename = "gemini-3.1-pro-preview")] + #[serde(rename = "gemini-3.1-pro-preview", alias = "gemini-3-pro-preview")] Gemini31Pro, #[serde(rename = "custom")] Custom { @@ -537,7 +535,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. } => name, @@ -548,7 +545,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. 
} => name, @@ -560,7 +556,6 @@ impl Model { Self::Gemini25FlashLite => "Gemini 2.5 Flash-Lite", Self::Gemini25Flash => "Gemini 2.5 Flash", Self::Gemini25Pro => "Gemini 2.5 Pro", - Self::Gemini3Pro => "Gemini 3 Pro", Self::Gemini3Flash => "Gemini 3 Flash", Self::Gemini31Pro => "Gemini 3.1 Pro", Self::Custom { @@ -574,7 +569,6 @@ impl Model { Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro - | Self::Gemini3Pro | Self::Gemini3Flash | Self::Gemini31Pro => 1_048_576, Self::Custom { max_tokens, .. } => *max_tokens, @@ -586,7 +580,6 @@ impl Model { Model::Gemini25FlashLite | Model::Gemini25Flash | Model::Gemini25Pro - | Model::Gemini3Pro | Model::Gemini3Flash | Model::Gemini31Pro => Some(65_536), Model::Custom { .. } => None, @@ -603,10 +596,7 @@ impl Model { pub fn mode(&self) -> GoogleModelMode { match self { - Self::Gemini25FlashLite - | Self::Gemini25Flash - | Self::Gemini25Pro - | Self::Gemini3Pro => { + Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro => { GoogleModelMode::Thinking { // By default these models are set to "auto", so we preserve that behavior // but indicate they are capable of thinking mode diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index cce229a7d3c51d8cff7e6ee4f8880cc8e8d8f73c..c80f97efb6dc8bf1450c08bfe85290096b44815b 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -21,7 +21,6 @@ default = ["font-kit", "wayland", "x11", "windows-manifest"] test-support = [ "leak-detection", "collections/test-support", - "util/test-support", "http_client/test-support", "wayland", "x11", @@ -37,7 +36,7 @@ x11 = [ screen-capture = [ "scap", ] -windows-manifest = [] +windows-manifest = ["dep:embed-resource"] [lib] path = "src/gpui.rs" @@ -54,6 +53,7 @@ ctor.workspace = true derive_more.workspace = true etagere = "0.2" futures.workspace = true +futures-concurrency.workspace = true gpui_macros.workspace = true http_client.workspace = true image.workspace = true @@ -83,19 +83,27 @@ serde.workspace = 
true serde_json.workspace = true slotmap.workspace = true smallvec.workspace = true -smol.workspace = true +async-channel.workspace = true stacksafe.workspace = true strum.workspace = true sum_tree.workspace = true taffy = "=0.9.0" thiserror.workspace = true -util.workspace = true -uuid.workspace = true +gpui_util.workspace = true waker-fn = "1.2.0" lyon = "1.0" pin-project = "1.1.10" circular-buffer.workspace = true spin = "0.10.0" +pollster.workspace = true +url.workspace = true +uuid.workspace = true +web-time.workspace = true + +[target.'cfg(target_family = "wasm")'.dependencies] +getrandom = { version = "0.3.4", features = ["wasm_js"] } +uuid = { workspace = true, features = ["js"] } + [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" @@ -135,19 +143,23 @@ backtrace.workspace = true collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true gpui_platform.workspace = true -http_client = { workspace = true, features = ["test-support"] } lyon = { version = "1.0", features = ["extra"] } pretty_assertions.workspace = true rand.workspace = true -reqwest_client = { workspace = true, features = ["test-support"] } scheduler = { workspace = true, features = ["test-support"] } unicode-segmentation.workspace = true -util = { workspace = true, features = ["test-support"] } +gpui_util = { workspace = true } +[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] +http_client = { workspace = true, features = ["test-support"] } +reqwest_client = { workspace = true, features = ["test-support"] } +[target.'cfg(target_family = "wasm")'.dev-dependencies] +wasm-bindgen = { workspace = true } +gpui_web.workspace = true -[target.'cfg(target_os = "windows")'.build-dependencies] -embed-resource = "3.0" +[build-dependencies] +embed-resource = { version = "3.0", optional = true } [target.'cfg(target_os = "macos")'.build-dependencies] bindgen = "0.71" diff --git a/crates/gpui/build.rs b/crates/gpui/build.rs index 
53f78e3c416d27d73d8593ef1315c4f943712715..b1bfd2194f5059a1894ed0222c97a5869ecf9fdc 100644 --- a/crates/gpui/build.rs +++ b/crates/gpui/build.rs @@ -1,14 +1,17 @@ #![allow(clippy::disallowed_methods, reason = "build scripts are exempt")] -#![cfg_attr(not(target_os = "macos"), allow(unused))] fn main() { println!("cargo::rustc-check-cfg=cfg(gles)"); - #[cfg(all(target_os = "windows", feature = "windows-manifest"))] - embed_resource(); + let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap_or_default(); + + if target_os == "windows" { + #[cfg(feature = "windows-manifest")] + embed_resource(); + } } -#[cfg(all(target_os = "windows", feature = "windows-manifest"))] +#[cfg(feature = "windows-manifest")] fn embed_resource() { let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml"); let rc_file = std::path::Path::new("resources/windows/gpui.rc"); diff --git a/crates/gpui/examples/animation.rs b/crates/gpui/examples/animation.rs index 27a9a0fa35152dfdcd02df207af4fd1f78ec2b7c..6755b49ca0d183be0e47faeccb81f8266757ff43 100644 --- a/crates/gpui/examples/animation.rs +++ b/crates/gpui/examples/animation.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::time::Duration; use anyhow::Result; @@ -101,7 +103,7 @@ impl Render for AnimationExample { } } -fn main() { +fn run_example() { application().with_assets(Assets {}).run(|cx: &mut App| { let options = WindowOptions { window_bounds: Some(WindowBounds::Windowed(Bounds::centered( @@ -118,3 +120,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/data_table.rs b/crates/gpui/examples/data_table.rs index d32ceea0136e943f30f5150da915ed2957f90628..b3f8737ec4f03ee17eb2b143e0dbbdf230fa6356 100644 --- a/crates/gpui/examples/data_table.rs +++ 
b/crates/gpui/examples/data_table.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::{ops::Range, rc::Rc, time::Duration}; use gpui::{ @@ -447,7 +449,7 @@ impl Render for DataTable { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.open_window( WindowOptions { @@ -472,3 +474,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/drag_drop.rs b/crates/gpui/examples/drag_drop.rs index 734a614bd6c978b45c5dbc397e068e6e87875312..b233bc4107a51b957c0cb6d18f2b94c141b044b3 100644 --- a/crates/gpui/examples/drag_drop.rs +++ b/crates/gpui/examples/drag_drop.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, Half, Hsla, Pixels, Point, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, @@ -121,7 +123,7 @@ impl Render for DragDrop { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(800.), px(600.0)), cx); cx.open_window( @@ -136,3 +138,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/focus_visible.rs b/crates/gpui/examples/focus_visible.rs index c32ffc62a2fa3696a72f7319cbd5ee843d9308bc..02a171da216e9df7fbe9ff0bf4ad7635df0229cb 100644 --- a/crates/gpui/examples/focus_visible.rs +++ b/crates/gpui/examples/focus_visible.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, Div, ElementId, FocusHandle, KeyBinding, SharedString, Stateful, Window, WindowBounds, 
WindowOptions, actions, div, prelude::*, px, size, @@ -192,7 +194,7 @@ impl Render for Example { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.bind_keys([ KeyBinding::new("tab", Tab, None), @@ -213,3 +215,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/gif_viewer.rs b/crates/gpui/examples/gif_viewer.rs index 6dea19d7820876ca6334045fa5a98c63c00cf800..59fb8d3794d9289c48841a2bfa8e7ff45436beb2 100644 --- a/crates/gpui/examples/gif_viewer.rs +++ b/crates/gpui/examples/gif_viewer.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{App, Context, Render, Window, WindowOptions, div, img, prelude::*}; use gpui_platform::application; use std::path::PathBuf; @@ -23,8 +25,7 @@ impl Render for GifViewer { } } -fn main() { - env_logger::init(); +fn run_example() { application().run(|cx: &mut App| { let gif_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples/image/black-cat-typing.gif"); @@ -40,3 +41,16 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + env_logger::init(); + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/gradient.rs b/crates/gpui/examples/gradient.rs index f931e6a3067b6b922a9ee29ea561d6f1eda7eb78..97321f1071947209aa6464b0c3b2f20e87c5d4ac 100644 --- a/crates/gpui/examples/gradient.rs +++ b/crates/gpui/examples/gradient.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, ColorSpace, Context, Half, Render, Window, WindowOptions, canvas, div, linear_color_stop, linear_gradient, point, prelude::*, px, size, @@ -243,7 +245,7 
@@ impl Render for GradientViewer { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.open_window( WindowOptions { @@ -256,3 +258,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/grid_layout.rs b/crates/gpui/examples/grid_layout.rs index 49119a89616758201edb3ca35eff3db364afd908..650a3e37bbc2f0740b9a13eac048ec9cae55232a 100644 --- a/crates/gpui/examples/grid_layout.rs +++ b/crates/gpui/examples/grid_layout.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, Hsla, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, }; @@ -64,7 +66,7 @@ impl Render for HolyGrailExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); cx.open_window( @@ -78,3 +80,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/hello_world.rs b/crates/gpui/examples/hello_world.rs index 634eca511269a8ad29a03cfdd104af4f081bee1c..50d56ec8df169e62f08a27567c49c886361d5ec8 100644 --- a/crates/gpui/examples/hello_world.rs +++ b/crates/gpui/examples/hello_world.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, SharedString, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, @@ -87,7 +89,7 @@ impl Render for HelloWorld { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); cx.open_window( 
@@ -105,3 +107,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index 10e40e65320c677a36f3a3027528db044b09e63e..832cdf896a80e84c3ca8b591e0a0956af2cedcac 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::fs; use std::path::PathBuf; use std::sync::Arc; @@ -8,7 +10,7 @@ use gpui::{ SharedString, SharedUri, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use reqwest_client::ReqwestClient; struct Assets { @@ -146,50 +148,77 @@ impl Render for ImageShowcase { actions!(image, [Quit]); -fn main() { - env_logger::init(); - +fn run_example() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - application() - .with_assets(Assets { - base: manifest_dir.join("examples"), - }) - .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::single_threaded_web(); + app.with_assets(Assets { + base: manifest_dir.join("examples"), + }) + .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { let http_client = ReqwestClient::user_agent("gpui example").unwrap(); cx.set_http_client(Arc::new(http_client)); - - cx.activate(true); - cx.on_action(|_: &Quit, cx| cx.quit()); - cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); - cx.set_menus(vec![Menu { - name: "Image".into(), - items: vec![MenuItem::action("Quit", Quit)], - }]); - - let window_options = WindowOptions { - titlebar: Some(TitlebarOptions { - title:
Some(SharedString::from("Image Example")), - appears_transparent: false, - ..Default::default() - }), - - window_bounds: Some(WindowBounds::Windowed(Bounds { - size: size(px(1100.), px(600.)), - origin: Point::new(px(200.), px(200.)), - })), - - ..Default::default() + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. + let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") }; + cx.set_http_client(Arc::new(http_client)); + } - cx.open_window(window_options, |_, cx| { - cx.new(|_| ImageShowcase { - // Relative path to your root project path - local_resource: manifest_dir.join("examples/image/app-icon.png").into(), - remote_resource: "https://picsum.photos/800/400".into(), - asset_resource: "image/color.svg".into(), - }) + cx.activate(true); + cx.on_action(|_: &Quit, cx| cx.quit()); + cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); + cx.set_menus(vec![Menu { + name: "Image".into(), + items: vec![MenuItem::action("Quit", Quit)], + }]); + + let window_options = WindowOptions { + titlebar: Some(TitlebarOptions { + title: Some(SharedString::from("Image Example")), + appears_transparent: false, + ..Default::default() + }), + + window_bounds: Some(WindowBounds::Windowed(Bounds { + size: size(px(1100.), px(600.)), + origin: Point::new(px(200.), px(200.)), + })), + + ..Default::default() + }; + + cx.open_window(window_options, |_, cx| { + cx.new(|_| ImageShowcase { + // Relative path to your root project path + local_resource: manifest_dir.join("examples/image/app-icon.png").into(), + remote_resource: "https://picsum.photos/800/400".into(), + asset_resource: "image/color.svg".into(), }) - .unwrap(); - }); + }) + .unwrap(); + }); +} + +#[cfg(not(target_family = "wasm"))] +fn main() { + env_logger::init(); + run_example(); +} + +#[cfg(target_family = "wasm")] 
+#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); } diff --git a/crates/gpui/examples/image_gallery.rs b/crates/gpui/examples/image_gallery.rs index 881ef5307ffebeba60daab30fe098b2f5a6cabb6..9d8ac29ff8c9762417ff59acbfc83db6ad9c8346 100644 --- a/crates/gpui/examples/image_gallery.rs +++ b/crates/gpui/examples/image_gallery.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use futures::FutureExt; use gpui::{ App, AppContext, Asset as _, AssetLogger, Bounds, ClickEvent, Context, ElementId, Entity, @@ -5,7 +7,7 @@ use gpui::{ RetainAllImageCache, SharedString, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, hash, image_cache, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use reqwest_client::ReqwestClient; use std::{collections::HashMap, sync::Arc}; @@ -245,12 +247,28 @@ impl ImageCache for SimpleLruCache { actions!(image, [Quit]); -fn main() { - env_logger::init(); +fn run_example() { + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::single_threaded_web(); - application().run(move |cx: &mut App| { - let http_client = ReqwestClient::user_agent("gpui example").unwrap(); - cx.set_http_client(Arc::new(http_client)); + app.run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { + let http_client = ReqwestClient::user_agent("gpui example").unwrap(); + cx.set_http_client(Arc::new(http_client)); + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. 
+ let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") + }; + cx.set_http_client(Arc::new(http_client)); + } cx.activate(true); cx.on_action(|_: &Quit, cx| cx.quit()); @@ -287,3 +305,16 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + env_logger::init(); + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/image_loading.rs b/crates/gpui/examples/image_loading.rs index 2de18fd7576ee91b3d54479ada909e04aa49475e..c2aab95f12a8736b28bf2fb2e6bab0d538ea27fd 100644 --- a/crates/gpui/examples/image_loading.rs +++ b/crates/gpui/examples/image_loading.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::{path::Path, sync::Arc, time::Duration}; use gpui::{ @@ -192,8 +194,7 @@ impl Render for ImageLoadingExample { } } -fn main() { - env_logger::init(); +fn run_example() { application().with_assets(Assets {}).run(|cx: &mut App| { let options = WindowOptions { window_bounds: Some(WindowBounds::Windowed(Bounds::centered( @@ -210,3 +211,16 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + env_logger::init(); + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index 1f8a9806ebee1f69973e4a54a5746aabda6f3f0c..d15d791cd008883506389cc7bb16dbad765969c0 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::ops::Range; use gpui::{ @@ -682,7 +684,7 @@ impl Render for InputExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = 
Bounds::centered(None, size(px(300.0), px(300.0)), cx); cx.bind_keys([ @@ -752,3 +754,15 @@ fn main() { cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/layer_shell.rs b/crates/gpui/examples/layer_shell.rs index 49958711318ef70dc4e0e89dbc096f5f8761dc41..1437b05b5e91ab2db06b2f2ea4f36c8b35dd4739 100644 --- a/crates/gpui/examples/layer_shell.rs +++ b/crates/gpui/examples/layer_shell.rs @@ -1,4 +1,6 @@ -fn main() { +#![cfg_attr(target_family = "wasm", no_main)] + +fn run_example() { #[cfg(all(target_os = "linux", feature = "wayland"))] example::main(); @@ -6,6 +8,18 @@ fn main() { panic!("This example requires the `wayland` feature and a linux system."); } +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} + #[cfg(all(target_os = "linux", feature = "wayland"))] mod example { use std::time::{Duration, SystemTime, UNIX_EPOCH}; diff --git a/crates/gpui/examples/mouse_pressure.rs b/crates/gpui/examples/mouse_pressure.rs index 1d0fe01b820caaed115d8b1d8baa46fa48266f64..24c3906f61e8fc8e78bd2b16b59fa4fc0db98063 100644 --- a/crates/gpui/examples/mouse_pressure.rs +++ b/crates/gpui/examples/mouse_pressure.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, MousePressureEvent, PressureStage, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, @@ -44,7 +46,7 @@ impl MousePressureExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); @@ -65,3 +67,15 @@ fn main() { cx.activate(true); }); } + 
+#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/on_window_close_quit.rs b/crates/gpui/examples/on_window_close_quit.rs index 6aa0887db5efea6cf093dc2fa1c4e8f6bd4fb908..e71a142d991c87ccbccb9c078fdb50d1fa3dba49 100644 --- a/crates/gpui/examples/on_window_close_quit.rs +++ b/crates/gpui/examples/on_window_close_quit.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, FocusHandle, KeyBinding, Window, WindowBounds, WindowOptions, actions, div, prelude::*, px, rgb, size, @@ -35,7 +37,7 @@ impl Render for ExampleWindow { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let mut bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); @@ -81,3 +83,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/opacity.rs b/crates/gpui/examples/opacity.rs index 31094f49343074e6494250ba08b3062daea6b7f7..ba61d0f5daca2eacb382324544dff95570824368 100644 --- a/crates/gpui/examples/opacity.rs +++ b/crates/gpui/examples/opacity.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::{fs, path::PathBuf}; use anyhow::Result; @@ -156,7 +158,7 @@ impl Render for HelloWorld { } } -fn main() { +fn run_example() { application() .with_assets(Assets { base: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples"), @@ -174,3 +176,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + 
run_example(); +} diff --git a/crates/gpui/examples/ownership_post.rs b/crates/gpui/examples/ownership_post.rs index ef9143f0c0685424738be54f56c7bd64af8a8f56..a4421b970bc8703ac97de7422e1b417b7f12ef3a 100644 --- a/crates/gpui/examples/ownership_post.rs +++ b/crates/gpui/examples/ownership_post.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{App, Context, Entity, EventEmitter, prelude::*}; use gpui_platform::application; @@ -11,7 +13,7 @@ struct Change { impl EventEmitter for Counter {} -fn main() { +fn run_example() { application().run(|cx: &mut App| { let counter: Entity = cx.new(|_cx| Counter { count: 0 }); let subscriber = cx.new(|cx: &mut Context| { @@ -34,3 +36,15 @@ fn main() { assert_eq!(subscriber.read(cx).count, 4); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/painting.rs b/crates/gpui/examples/painting.rs index fa73a38136d287f357eb8b44bea732ad84185a25..18ef6b9fa3741297ddfebc1b5df3ea4a3594fc05 100644 --- a/crates/gpui/examples/painting.rs +++ b/crates/gpui/examples/painting.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder, PathStyle, Pixels, Point, Render, StrokeOptions, Window, WindowOptions, canvas, div, linear_color_stop, @@ -445,7 +447,7 @@ impl Render for PaintingViewer { } } -fn main() { +fn run_example() { application().run(|cx| { cx.open_window( WindowOptions { @@ -462,3 +464,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/paths_bench.rs 
b/crates/gpui/examples/paths_bench.rs index 17f80b0ff470901af4da1213e9ed12ad1585673d..4e12f1e50ab53d69117a72a231aef1eb5c39fa2e 100644 --- a/crates/gpui/examples/paths_bench.rs +++ b/crates/gpui/examples/paths_bench.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ Background, Bounds, ColorSpace, Context, Path, PathBuilder, Pixels, Render, TitlebarOptions, Window, WindowBounds, WindowOptions, canvas, div, linear_color_stop, linear_gradient, point, @@ -69,7 +71,7 @@ impl Render for PaintingViewer { } } -fn main() { +fn run_example() { application().run(|cx| { cx.open_window( WindowOptions { @@ -91,3 +93,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/pattern.rs b/crates/gpui/examples/pattern.rs index bc9237268d70157a415a8819984db7d96e477e5b..3113d39d808ea01675b5cf5e8be976572aeaad8d 100644 --- a/crates/gpui/examples/pattern.rs +++ b/crates/gpui/examples/pattern.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, AppContext, Bounds, Context, Window, WindowBounds, WindowOptions, div, linear_color_stop, linear_gradient, pattern_slash, prelude::*, px, rgb, size, @@ -99,7 +101,7 @@ impl Render for PatternExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(600.0), px(600.0)), cx); cx.open_window( @@ -114,3 +116,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/popover.rs b/crates/gpui/examples/popover.rs index 
429eb17c0629938dcbd9fb21698ab887503fd51a..bd112b0e69a62c1303e9d90945e24cfb3f659b82 100644 --- a/crates/gpui/examples/popover.rs +++ b/crates/gpui/examples/popover.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Context, Corner, Div, Hsla, Stateful, Window, WindowOptions, anchored, deferred, div, prelude::*, px, @@ -161,7 +163,7 @@ impl Render for HelloWorld { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.open_window(WindowOptions::default(), |_, cx| { cx.new(|_| HelloWorld { @@ -173,3 +175,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/scrollable.rs b/crates/gpui/examples/scrollable.rs index 6e4865ee496366da69494334152b703a509780d3..39864c834aedae414191afb61bba27d98696d7dd 100644 --- a/crates/gpui/examples/scrollable.rs +++ b/crates/gpui/examples/scrollable.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{App, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px, size}; use gpui_platform::application; @@ -42,7 +44,7 @@ impl Render for Scrollable { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(500.), px(500.0)), cx); cx.open_window( @@ -56,3 +58,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/set_menus.rs b/crates/gpui/examples/set_menus.rs index 30f8ef0f32a0d8c56bafeb71faa1c2435ef9fff3..683793c35fd4d356c068a3c36b041fba1dbc5ecf 100644 --- a/crates/gpui/examples/set_menus.rs +++ 
b/crates/gpui/examples/set_menus.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Context, Global, Menu, MenuItem, SharedString, SystemMenuType, Window, WindowOptions, actions, div, prelude::*, rgb, @@ -20,7 +22,7 @@ impl Render for SetMenus { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.set_global(AppState::new()); @@ -36,6 +38,18 @@ fn main() { }); } +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} + #[derive(PartialEq)] enum ViewMode { List, diff --git a/crates/gpui/examples/shadow.rs b/crates/gpui/examples/shadow.rs index 519053ae9293d51df86ba14b66e1182d718035a0..d39a2eb62ed74bfdbbef733bf5154184909911cf 100644 --- a/crates/gpui/examples/shadow.rs +++ b/crates/gpui/examples/shadow.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, BoxShadow, Context, Div, SharedString, Window, WindowBounds, WindowOptions, div, hsla, point, prelude::*, px, relative, rgb, size, @@ -569,7 +571,7 @@ impl Render for Shadow { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(1000.0), px(800.0)), cx); cx.open_window( @@ -584,3 +586,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/svg/svg.rs b/crates/gpui/examples/svg/svg.rs index 54e99320bd59a9d8fffeea65c7825105781d9226..e9d234167777a94784207601b01a9f1befb76ead 100644 --- a/crates/gpui/examples/svg/svg.rs +++ b/crates/gpui/examples/svg/svg.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::fs; use std::path::PathBuf; @@ -68,7 
+70,7 @@ impl Render for SvgExample { } } -fn main() { +fn run_example() { application() .with_assets(Assets { base: PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("examples"), @@ -86,3 +88,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/tab_stop.rs b/crates/gpui/examples/tab_stop.rs index 6fa0ee4929db62b2122748729686d955f96932bb..3fec59ad9e6a53b067b1aa4bf9894f986c2e9b27 100644 --- a/crates/gpui/examples/tab_stop.rs +++ b/crates/gpui/examples/tab_stop.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, Div, ElementId, FocusHandle, KeyBinding, SharedString, Stateful, Window, WindowBounds, WindowOptions, actions, div, prelude::*, px, size, @@ -178,7 +180,7 @@ impl Render for Example { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.bind_keys([ KeyBinding::new("tab", Tab, None), @@ -198,3 +200,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/testing.rs b/crates/gpui/examples/testing.rs index a3d09d1395a165635cb785a910009c8e96401f2f..f6e15791d63aa1a3dad0b74f51a2c7a9cce4b3f3 100644 --- a/crates/gpui/examples/testing.rs +++ b/crates/gpui/examples/testing.rs @@ -1,3 +1,4 @@ +#![cfg_attr(target_family = "wasm", no_main)] //! Example demonstrating GPUI's testing infrastructure. //! //! When run normally, this displays an interactive counter window. 
@@ -176,7 +177,7 @@ impl Render for Counter { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.bind_keys([ gpui::KeyBinding::new("up", Increment, Some("Counter")), @@ -199,6 +200,18 @@ fn main() { }); } +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/gpui/examples/text.rs b/crates/gpui/examples/text.rs index d4effbbce91cc6e8261a5ec44d196a1868a772a6..acaf4fe83a49726e0a3c641ca577bf75c54e224d 100644 --- a/crates/gpui/examples/text.rs +++ b/crates/gpui/examples/text.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use std::{ ops::{Deref, DerefMut}, sync::Arc, @@ -298,7 +300,7 @@ impl Render for TextExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { cx.set_menus(vec![Menu { name: "GPUI Typography".into(), @@ -332,3 +334,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/text_layout.rs b/crates/gpui/examples/text_layout.rs index 5c9ef368b8406e0882ab159f82c3eecba52a19da..4bb930e052875b044f10f72fffc5c3656bb9645f 100644 --- a/crates/gpui/examples/text_layout.rs +++ b/crates/gpui/examples/text_layout.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, FontStyle, FontWeight, StyledText, Window, WindowBounds, WindowOptions, div, prelude::*, px, size, @@ -81,7 +83,7 @@ impl Render for HelloWorld { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); cx.open_window( @@ -95,3 +97,15 @@ fn main() { 
cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/text_wrapper.rs b/crates/gpui/examples/text_wrapper.rs index cfc981e9e2b0bff6a63ecc78125292d6ff43ce48..3750c3e32b524991cf5544bdd39421961ce660bb 100644 --- a/crates/gpui/examples/text_wrapper.rs +++ b/crates/gpui/examples/text_wrapper.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, TextOverflow, Window, WindowBounds, WindowOptions, div, prelude::*, px, size, @@ -108,7 +110,7 @@ impl Render for HelloWorld { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); cx.open_window( @@ -122,3 +124,15 @@ fn main() { cx.activate(true); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/tree.rs b/crates/gpui/examples/tree.rs index 43607b6648f3a7894f90a3c42ab0af8d8663790c..9c4ea2cd127e169c85dac1a0957f1237c08e6817 100644 --- a/crates/gpui/examples/tree.rs +++ b/crates/gpui/examples/tree.rs @@ -1,3 +1,4 @@ +#![cfg_attr(target_family = "wasm", no_main)] //! Renders a div with deep children hierarchy. This example is useful to exemplify that Zed can //! handle deep hierarchies (even though it cannot just yet!). 
use std::sync::LazyLock; @@ -29,7 +30,7 @@ impl Render for Tree { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx); cx.open_window( @@ -42,3 +43,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/uniform_list.rs b/crates/gpui/examples/uniform_list.rs index c287cdfb45568d32d939881df5b7e289c4a41727..fabcde5c4bca50a2aae09f58b66c6e3297aab1b0 100644 --- a/crates/gpui/examples/uniform_list.rs +++ b/crates/gpui/examples/uniform_list.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px, rgb, size, uniform_list, @@ -36,7 +38,7 @@ impl Render for UniformListExample { } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx); cx.open_window( @@ -49,3 +51,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs index 80d4f46ac09d8adb483f909f00bd88fd97f1f990..c51f43fe66deff0daf3de9a8442b46b7a5d8a6e3 100644 --- a/crates/gpui/examples/window.rs +++ b/crates/gpui/examples/window.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, KeyBinding, PromptButton, PromptLevel, Window, WindowBounds, WindowKind, WindowOptions, actions, div, prelude::*, px, rgb, size, @@ -306,7 +308,7 @@ impl Render for WindowDemo { actions!(window, [Quit]); -fn main() { +fn 
run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); @@ -333,3 +335,15 @@ fn main() { cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/window_positioning.rs b/crates/gpui/examples/window_positioning.rs index 45ac3fcd78fc811dcb450609c293d81900d3c67b..036a2fcdba750c0e35ea7b05fa3822f7a1c1b0db 100644 --- a/crates/gpui/examples/window_positioning.rs +++ b/crates/gpui/examples/window_positioning.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, DisplayId, Hsla, Pixels, SharedString, Size, Window, WindowBackgroundAppearance, WindowBounds, WindowKind, WindowOptions, div, point, prelude::*, @@ -68,7 +70,7 @@ fn build_window_options(display_id: DisplayId, bounds: Bounds) -> Window } } -fn main() { +fn run_example() { application().run(|cx: &mut App| { // Create several new windows, positioned in the top right corner of each screen let size = Size { @@ -218,3 +220,15 @@ fn main() { } }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/examples/window_shadow.rs b/crates/gpui/examples/window_shadow.rs index c8e37b67e98c38d45608836e5752dee8a575091a..b8c052693da9f3408dd95b39dac4733abe3965c6 100644 --- a/crates/gpui/examples/window_shadow.rs +++ b/crates/gpui/examples/window_shadow.rs @@ -1,3 +1,5 @@ +#![cfg_attr(target_family = "wasm", no_main)] + use gpui::{ App, Bounds, Context, CursorStyle, Decorations, HitboxBehavior, Hsla, MouseButton, Pixels, Point, ResizeEdge, Size, Window, WindowBackgroundAppearance, 
WindowBounds, WindowDecorations, @@ -203,7 +205,7 @@ fn resize_edge(pos: Point, shadow_size: Pixels, size: Size) -> O Some(edge) } -fn main() { +fn run_example() { application().run(|cx: &mut App| { let bounds = Bounds::centered(None, size(px(600.0), px(600.0)), cx); cx.open_window( @@ -226,3 +228,15 @@ fn main() { .unwrap(); }); } + +#[cfg(not(target_family = "wasm"))] +fn main() { + run_example(); +} + +#[cfg(target_family = "wasm")] +#[wasm_bindgen::prelude::wasm_bindgen(start)] +pub fn start() { + gpui_platform::web_init(); + run_example(); +} diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 2a83045aa0f9b776eb247f40ba39312f2cd15d4a..f1fe264f4ef4ccb09081a6672c7c4ddb1d24dc97 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1,3 +1,4 @@ +use scheduler::Instant; use std::{ any::{TypeId, type_name}, cell::{BorrowMutError, Cell, Ref, RefCell, RefMut}, @@ -7,7 +8,7 @@ use std::{ path::{Path, PathBuf}, rc::{Rc, Weak}, sync::{Arc, atomic::Ordering::SeqCst}, - time::{Duration, Instant}, + time::Duration, }; use anyhow::{Context as _, Result, anyhow}; @@ -25,11 +26,11 @@ pub use async_context::*; use collections::{FxHashMap, FxHashSet, HashMap, VecDeque}; pub use context::*; pub use entity_map::*; +use gpui_util::{ResultExt, debug_panic}; use http_client::{HttpClient, Url}; use smallvec::SmallVec; #[cfg(any(test, feature = "test-support"))] pub use test_context::*; -use util::{ResultExt, debug_panic}; #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] pub use visual_test_context::*; @@ -752,6 +753,37 @@ impl App { app } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> { + self.entities.ref_counts_drop_handle() + } + + /// Captures a snapshot of all entities that currently have alive handles. 
+ /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.entities.leak_detector_snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.entities.assert_no_new_leaks(snapshot) + } + /// Quit the application gracefully. Handlers registered with [`Context::on_app_quit`] /// will be given 100ms to complete before exiting. 
pub fn shutdown(&mut self) { diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index 6b9f572fc880b35f719b6a064f0904cfa12153d9..e2fd203c78364a4d096f9792dcea7e6f7b8113ea 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -4,10 +4,10 @@ use crate::{ PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle, }; -use anyhow::Context as _; +use anyhow::{Context as _, bail}; use derive_more::{Deref, DerefMut}; use futures::channel::oneshot; -use smol::future::FutureExt; +use futures::future::FutureExt; use std::{future::Future, rc::Weak}; use super::{Context, WeakEntity}; @@ -88,6 +88,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let mut lock = app.try_borrow_mut()?; + if lock.quitting { + bail!("app is quitting"); + } lock.update_window(window, f) } @@ -101,6 +104,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let lock = app.borrow(); + if lock.quitting { + bail!("app is quitting"); + } lock.read_window(window, read) } @@ -174,6 +180,9 @@ impl AsyncApp { { let app = self.app(); let mut lock = app.borrow_mut(); + if lock.quitting { + bail!("app is quitting"); + } lock.open_window(options, build_root_view) } @@ -211,6 +220,9 @@ impl AsyncApp { pub fn try_read_global(&self, read: impl FnOnce(&G, &App) -> R) -> Option { let app = self.app(); let app = app.borrow_mut(); + if app.quitting { + return None; + } Some(read(app.try_global()?, &app)) } @@ -241,10 +253,10 @@ impl AsyncApp { &self, entity: &WeakEntity, f: Callback, - ) -> util::Deferred> { + ) -> gpui_util::Deferred> { let entity = entity.clone(); let mut cx = self.clone(); - util::defer(move || { + gpui_util::defer(move || { entity.update(&mut cx, f).ok(); }) } diff --git a/crates/gpui/src/app/context.rs b/crates/gpui/src/app/context.rs index 
28d30ab37e7d7b502afc3f471416f2589380ce85..c30a76bd9c8861d4d5b4d9dc4b5893ffeb2eb4b8 100644 --- a/crates/gpui/src/app/context.rs +++ b/crates/gpui/src/app/context.rs @@ -5,6 +5,7 @@ use crate::{ }; use anyhow::Result; use futures::FutureExt; +use gpui_util::Deferred; use std::{ any::{Any, TypeId}, borrow::{Borrow, BorrowMut}, @@ -12,7 +13,6 @@ use std::{ ops, sync::Arc, }; -use util::Deferred; use super::{App, AsyncWindowContext, Entity, KeystrokeEvent}; @@ -278,7 +278,7 @@ impl<'a, T: 'static> Context<'a, T> { ) -> Deferred { let this = self.weak_entity(); let mut cx = self.to_async(); - util::defer(move || { + gpui_util::defer(move || { this.update(&mut cx, f).ok(); }) } diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 2df01a1b6e05434f0b9a6822f12b0038efc5c10a..c12f952cc82ae8c161c5263ea47533bdef55e5e5 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -83,6 +83,32 @@ impl EntityMap { } } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> { + self.ref_counts.clone() + } + + /// Captures a snapshot of all entities that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.ref_counts.read().leak_detector.snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// See [`LeakDetector::assert_no_new_leaks`] for details. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.ref_counts + .read() + .leak_detector + .assert_no_new_leaks(snapshot) + } + /// Reserve a slot for an entity, which you can subsequently use with `insert`. 
pub fn reserve(&self) -> Slot { let id = self.ref_counts.write().counts.insert(1.into()); @@ -225,7 +251,12 @@ pub struct AnyEntity { } impl AnyEntity { - fn new(id: EntityId, entity_type: TypeId, entity_map: Weak>) -> Self { + fn new( + id: EntityId, + entity_type: TypeId, + entity_map: Weak>, + #[cfg(any(test, feature = "leak-detection"))] type_name: &'static str, + ) -> Self { Self { entity_id: id, entity_type, @@ -236,7 +267,7 @@ impl AnyEntity { .unwrap() .write() .leak_detector - .handle_created(id), + .handle_created(id, Some(type_name)), entity_map, } } @@ -299,7 +330,7 @@ impl Clone for AnyEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), } } } @@ -395,7 +426,13 @@ impl Entity { T: 'static, { Self { - any_entity: AnyEntity::new(id, TypeId::of::(), entity_map), + any_entity: AnyEntity::new( + id, + TypeId::of::(), + entity_map, + #[cfg(any(test, feature = "leak-detection"))] + std::any::type_name::(), + ), entity_type: PhantomData, } } @@ -574,7 +611,7 @@ impl AnyWeakEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), }) } @@ -892,7 +929,23 @@ pub(crate) struct HandleId { #[cfg(any(test, feature = "leak-detection"))] pub(crate) struct LeakDetector { next_handle_id: u64, - entity_handles: HashMap>>, + entity_handles: HashMap, +} + +/// A snapshot of the set of alive entities at a point in time. +/// +/// Created by [`LeakDetector::snapshot`]. Can later be passed to +/// [`LeakDetector::assert_no_new_leaks`] to verify that no new entity +/// handles remain between the snapshot and the current state. 
+#[cfg(any(test, feature = "leak-detection"))] +pub struct LeakDetectorSnapshot { + entity_ids: collections::HashSet, +} + +#[cfg(any(test, feature = "leak-detection"))] +struct EntityLeakData { + handles: HashMap>, + type_name: &'static str, } #[cfg(any(test, feature = "leak-detection"))] @@ -903,11 +956,21 @@ impl LeakDetector { /// the handle is dropped. If `LEAK_BACKTRACE` is set, captures a backtrace /// at the allocation site. #[track_caller] - pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId { - let id = util::post_inc(&mut self.next_handle_id); + pub fn handle_created( + &mut self, + entity_id: EntityId, + type_name: Option<&'static str>, + ) -> HandleId { + let id = gpui_util::post_inc(&mut self.next_handle_id); let handle_id = HandleId { id }; - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.insert( + let handles = self + .entity_handles + .entry(entity_id) + .or_insert_with(|| EntityLeakData { + handles: HashMap::default(), + type_name: type_name.unwrap_or(""), + }); + handles.handles.insert( handle_id, LEAK_BACKTRACE.then(backtrace::Backtrace::new_unresolved), ); @@ -919,8 +982,14 @@ impl LeakDetector { /// This removes the handle from tracking. The `handle_id` should be the same /// one returned by `handle_created` when the handle was allocated. pub fn handle_released(&mut self, entity_id: EntityId, handle_id: HandleId) { - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.remove(&handle_id); + if let std::collections::hash_map::Entry::Occupied(mut data) = + self.entity_handles.entry(entity_id) + { + data.get_mut().handles.remove(&handle_id); + if data.get().handles.is_empty() { + data.remove(); + } + } } /// Asserts that all handles to the given entity have been released. @@ -932,11 +1001,10 @@ impl LeakDetector { /// otherwise it suggests setting the environment variable to get more info. 
pub fn assert_released(&mut self, entity_id: EntityId) { use std::fmt::Write as _; - let handles = self.entity_handles.entry(entity_id).or_default(); - if !handles.is_empty() { + if let Some(data) = self.entity_handles.remove(&entity_id) { let mut out = String::new(); - for backtrace in handles.values_mut() { - if let Some(mut backtrace) = backtrace.take() { + for (_, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { backtrace.resolve(); writeln!(out, "Leaked handle:\n{:?}", backtrace).unwrap(); } else { @@ -950,6 +1018,96 @@ impl LeakDetector { panic!("{out}"); } } + + /// Captures a snapshot of all entity IDs that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + pub fn snapshot(&self) -> LeakDetectorSnapshot { + LeakDetectorSnapshot { + entity_ids: self.entity_handles.keys().copied().collect(), + } + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. 
+ pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + use std::fmt::Write as _; + + let mut out = String::new(); + for (entity_id, data) in &self.entity_handles { + if snapshot.entity_ids.contains(entity_id) { + continue; + } + for (_, backtrace) in &data.handles { + if let Some(backtrace) = backtrace { + let mut backtrace = backtrace.clone(); + backtrace.resolve(); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + + if !out.is_empty() { + panic!("New entity leaks detected since snapshot:\n{out}"); + } + } +} + +#[cfg(any(test, feature = "leak-detection"))] +impl Drop for LeakDetector { + fn drop(&mut self) { + use std::fmt::Write; + + if self.entity_handles.is_empty() || std::thread::panicking() { + return; + } + + let mut out = String::new(); + for (entity_id, data) in self.entity_handles.drain() { + for (_handle, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { + backtrace.resolve(); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + panic!("Exited with leaked handles:\n{out}"); + } } #[cfg(test)] @@ -1007,4 +1165,42 @@ mod test { vec![1], ); } + + #[test] + fn test_leak_detector_snapshot_no_leaks() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let temporary = entity_map.insert(slot, TestEntity { i: 2 }); + drop(temporary); + + 
entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + } + + #[test] + #[should_panic(expected = "New entity leaks detected since snapshot")] + fn test_leak_detector_snapshot_detects_new_leak() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let leaked = entity_map.insert(slot, TestEntity { i: 2 }); + + // `leaked` is still alive, so this should panic. + entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + drop(leaked); + } } diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 8077632778e2f42af8790a0823a21d4d62efe6e5..0f0f0e14fbd8565d8f948579ed1ab23381c80108 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -5,7 +5,7 @@ use crate::{ ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, - WindowHandle, WindowOptions, app::GpuiMode, + WindowHandle, WindowOptions, app::GpuiMode, window::ElementArenaScope, }; use anyhow::{anyhow, bail}; use futures::{Stream, StreamExt, channel::oneshot}; @@ -18,18 +18,17 @@ use std::{ /// an implementation of `Context` with additional methods that are useful in tests. 
#[derive(Clone)] pub struct TestAppContext { - #[doc(hidden)] - pub app: Rc, #[doc(hidden)] pub background_executor: BackgroundExecutor, #[doc(hidden)] pub foreground_executor: ForegroundExecutor, - #[doc(hidden)] - pub dispatcher: TestDispatcher, + dispatcher: TestDispatcher, test_platform: Rc, text_system: Arc, fn_name: Option<&'static str>, on_quit: Rc>>>, + #[doc(hidden)] + pub app: Rc, } impl AppContext for TestAppContext { @@ -402,8 +401,8 @@ impl TestAppContext { } /// Wait until there are no more pending tasks. - pub fn run_until_parked(&mut self) { - self.background_executor.run_until_parked() + pub fn run_until_parked(&self) { + self.dispatcher.run_until_parked(); } /// Simulate dispatching an action to the currently focused node in the window. @@ -521,22 +520,25 @@ impl TestAppContext { let mut notifications = self.notifications(entity); use futures::FutureExt as _; - use smol::future::FutureExt as _; + use futures_concurrency::future::Race as _; - async { - loop { - if entity.update(self, &mut predicate) { - return Ok(()); - } + ( + async { + loop { + if entity.update(self, &mut predicate) { + return Ok(()); + } - if notifications.next().await.is_none() { - bail!("entity dropped") + if notifications.next().await.is_none() { + bail!("entity dropped") + } } - } - } - .race(timer.map(|_| Err(anyhow!("condition timed out")))) - .await - .unwrap(); + }, + timer.map(|_| Err(anyhow!("condition timed out"))), + ) + .race() + .await + .unwrap(); } /// Set a name for this App. 
@@ -816,6 +818,8 @@ impl VisualTestContext { E: Element, { self.update(|window, cx| { + let _arena_scope = ElementArenaScope::enter(&cx.element_arena); + window.invalidator.set_phase(DrawPhase::Prepaint); let mut element = Drawable::new(f(window, cx)); element.layout_as_root(space.into(), window, cx); @@ -827,6 +831,9 @@ impl VisualTestContext { window.invalidator.set_phase(DrawPhase::None); window.refresh(); + drop(element); + cx.element_arena.borrow_mut().clear(); + (request_layout_state, prepaint_state) }) } diff --git a/crates/gpui/src/app/visual_test_context.rs b/crates/gpui/src/app/visual_test_context.rs index 22389b5b27566db05be7c462e87596f17def880a..f0fbf47f1f82008c592884b21d6372a9794b8a4a 100644 --- a/crates/gpui/src/app/visual_test_context.rs +++ b/crates/gpui/src/app/visual_test_context.rs @@ -356,7 +356,7 @@ impl VisualTestAppContext { predicate: impl Fn(&T) -> bool, timeout: Duration, ) -> Result<()> { - let start = std::time::Instant::now(); + let start = web_time::Instant::now(); loop { { let app = self.app.borrow(); diff --git a/crates/gpui/src/elements/animation.rs b/crates/gpui/src/elements/animation.rs index e72fb00456d14dec74ffc56e040511c189af1d18..8a42c8bd492469a8952d9dd15e410859cd4e6217 100644 --- a/crates/gpui/src/elements/animation.rs +++ b/crates/gpui/src/elements/animation.rs @@ -1,7 +1,5 @@ -use std::{ - rc::Rc, - time::{Duration, Instant}, -}; +use scheduler::Instant; +use std::{rc::Rc, time::Duration}; use crate::{ AnyElement, App, Element, ElementId, GlobalElementId, InspectorElementId, IntoElement, Window, diff --git a/crates/gpui/src/elements/deferred.rs b/crates/gpui/src/elements/deferred.rs index 9498734198dbe58798867ebe7f20138e5667777b..25245fa4b6ea70284658bf0b91b53ca395b750dd 100644 --- a/crates/gpui/src/elements/deferred.rs +++ b/crates/gpui/src/elements/deferred.rs @@ -62,7 +62,7 @@ impl Element for Deferred { ) { let child = self.child.take().unwrap(); let element_offset = window.element_offset(); - window.defer_draw(child, 
element_offset, self.priority) + window.defer_draw(child, element_offset, self.priority, None) } fn paint( diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 4537cfc22f6ed8c4ea3d5443327723207af88620..58f11a7fa1fb876ef4b4ef80fedf1948423a24f5 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -26,6 +26,7 @@ use crate::{ size, }; use collections::HashMap; +use gpui_util::ResultExt; use refineable::Refineable; use smallvec::SmallVec; use stacksafe::{StackSafe, stacksafe}; @@ -40,7 +41,6 @@ use std::{ sync::Arc, time::Duration, }; -use util::ResultExt; use super::ImageCacheProvider; @@ -1886,18 +1886,18 @@ impl Interactivity { // high for the maximum scroll, we round the scroll max to 2 decimal // places here. let padded_content_size = self.content_size + padding_size; - let scroll_max = (padded_content_size - bounds.size) + let scroll_max = Point::from(padded_content_size - bounds.size) .map(round_to_two_decimals) .max(&Default::default()); // Clamp scroll offset in case scroll max is smaller now (e.g., if children // were removed or the bounds became larger). let mut scroll_offset = scroll_offset.borrow_mut(); - scroll_offset.x = scroll_offset.x.clamp(-scroll_max.width, px(0.)); + scroll_offset.x = scroll_offset.x.clamp(-scroll_max.x, px(0.)); if scroll_to_bottom { - scroll_offset.y = -scroll_max.height; + scroll_offset.y = -scroll_max.y; } else { - scroll_offset.y = scroll_offset.y.clamp(-scroll_max.height, px(0.)); + scroll_offset.y = scroll_offset.y.clamp(-scroll_max.y, px(0.)); } if let Some(mut scroll_handle_state) = tracked_scroll_handle { @@ -3285,7 +3285,7 @@ impl ScrollAnchor { struct ScrollHandleState { offset: Rc>>, bounds: Bounds, - max_offset: Size, + max_offset: Point, child_bounds: Vec>, scroll_to_bottom: bool, overflow: Point, @@ -3329,7 +3329,7 @@ impl ScrollHandle { } /// Get the maximum scroll offset. 
- pub fn max_offset(&self) -> Size { + pub fn max_offset(&self) -> Point { self.0.borrow().max_offset } diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index fcba6a6a4e5b3d82262129bc9f7d9bdc72c88da9..875f9e6dc1cc7d248f9e70488e52480dcca53fa3 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -4,13 +4,15 @@ use crate::{ Interactivity, IntoElement, LayoutId, Length, ObjectFit, Pixels, RenderImage, Resource, SharedString, SharedUri, StyleRefinement, Styled, Task, Window, px, }; -use anyhow::{Context as _, Result}; +use anyhow::Result; -use futures::{AsyncReadExt, Future}; +use futures::Future; +use gpui_util::ResultExt; use image::{ AnimationDecoder, DynamicImage, Frame, ImageError, ImageFormat, Rgba, codecs::{gif::GifDecoder, webp::WebPDecoder}, }; +use scheduler::Instant; use smallvec::SmallVec; use std::{ fs, @@ -19,10 +21,9 @@ use std::{ path::{Path, PathBuf}, str::FromStr, sync::Arc, - time::{Duration, Instant}, + time::Duration, }; use thiserror::Error; -use util::ResultExt; use super::{Stateful, StatefulInteractiveElement}; @@ -49,7 +50,7 @@ pub enum ImageSource { } fn is_uri(uri: &str) -> bool { - http_client::Uri::from_str(uri).is_ok() + url::Url::from_str(uri).is_ok() } impl From for ImageSource { @@ -602,6 +603,9 @@ impl Asset for ImageAssetLoader { let bytes = match source.clone() { Resource::Path(uri) => fs::read(uri.as_ref())?, Resource::Uri(uri) => { + use anyhow::Context as _; + use futures::AsyncReadExt as _; + let mut response = client .get(uri.as_ref(), ().into(), true) .await diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 5403bf10eb9a078dfd113462644636b49d1840e4..92b5389fecf219c0c113f682463498902df4c07d 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -491,7 +491,7 @@ impl ListState { /// Returns the maximum scroll offset according to the items we have measured. 
/// This value remains constant while dragging to prevent the scrollbar from moving away unexpectedly. - pub fn max_offset_for_scrollbar(&self) -> Size { + pub fn max_offset_for_scrollbar(&self) -> Point { let state = self.0.borrow(); let bounds = state.last_layout_bounds.unwrap_or_default(); @@ -499,7 +499,7 @@ impl ListState { .scrollbar_drag_start_height .unwrap_or_else(|| state.items.summary().height); - Size::new(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height)) + point(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height)) } /// Returns the current scroll offset adjusted for the scrollbar diff --git a/crates/gpui/src/elements/svg.rs b/crates/gpui/src/elements/svg.rs index 57b2d712e54c501cb7eaf59f6433748ddf36d3fc..a29b106c0e223b01340ecab27b45fdb94163d207 100644 --- a/crates/gpui/src/elements/svg.rs +++ b/crates/gpui/src/elements/svg.rs @@ -3,10 +3,9 @@ use std::{fs, path::Path, sync::Arc}; use crate::{ App, Asset, Bounds, Element, GlobalElementId, Hitbox, InspectorElementId, InteractiveElement, Interactivity, IntoElement, LayoutId, Pixels, Point, Radians, SharedString, Size, - StyleRefinement, Styled, TransformationMatrix, Window, geometry::Negate as _, point, px, - radians, size, + StyleRefinement, Styled, TransformationMatrix, Window, point, px, radians, size, }; -use util::ResultExt; +use gpui_util::ResultExt; /// An SVG element. 
pub struct Svg { @@ -254,7 +253,7 @@ impl Transformation { .translate(center.scale(scale_factor) + self.translate.scale(scale_factor)) .rotate(self.rotate) .scale(self.scale) - .translate(center.scale(scale_factor).negate()) + .translate(center.scale(-scale_factor)) } } diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index ab861be5a29fcbfb575a48cf743407f1c6e927d6..ded0f596dcea2f6c992961906503adb6829e885f 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -6,6 +6,7 @@ use crate::{ WrappedLineLayout, register_tooltip_mouse_handlers, set_tooltip_on_window, }; use anyhow::Context as _; +use gpui_util::ResultExt; use itertools::Itertools; use smallvec::SmallVec; use std::{ @@ -16,7 +17,6 @@ use std::{ rc::Rc, sync::Arc, }; -use util::ResultExt; impl Element for &'static str { type RequestLayoutState = TextLayout; diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index d4299b73e8401faa0fc4a5aae8b7773cd920e709..31c1ed80b92efb5dfa9ead6dcaf9050fe68ea399 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -1,18 +1,13 @@ use crate::{App, PlatformDispatcher, PlatformScheduler}; use futures::channel::mpsc; +use futures::prelude::*; +use gpui_util::TryFutureExt; +use scheduler::Instant; use scheduler::Scheduler; -use smol::prelude::*; use std::{ - fmt::Debug, - future::Future, - marker::PhantomData, - mem, - pin::Pin, - rc::Rc, - sync::Arc, - time::{Duration, Instant}, + fmt::Debug, future::Future, marker::PhantomData, mem, pin::Pin, rc::Rc, sync::Arc, + time::Duration, }; -use util::TryFutureExt; pub use scheduler::{FallibleTask, ForegroundExecutor as SchedulerForegroundExecutor, Priority}; diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 73fa9906267412c9f1c840d8403beeef4718119e..76157a06a587ac851d19f19fc5a4ed23c634bab5 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -78,6 +78,7 @@ pub trait Along { 
Deserialize, JsonSchema, Hash, + Neg, )] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] @@ -182,12 +183,6 @@ impl Along for Point { } } -impl Negate for Point { - fn negate(self) -> Self { - self.map(Negate::negate) - } -} - impl Point { /// Scales the point by a given factor, which is typically derived from the resolution /// of a target display to ensure proper sizing of UI elements. @@ -393,7 +388,9 @@ impl Display for Point { /// /// This struct is generic over the type `T`, which can be any type that implements `Clone`, `Default`, and `Debug`. /// It is commonly used to specify dimensions for elements in a UI, such as a window or element. -#[derive(Refineable, Default, Clone, Copy, PartialEq, Div, Hash, Serialize, Deserialize)] +#[derive( + Add, Clone, Copy, Default, Deserialize, Div, Hash, Neg, PartialEq, Refineable, Serialize, Sub, +)] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] pub struct Size { @@ -598,34 +595,6 @@ where } } -impl Sub for Size -where - T: Sub + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn sub(self, rhs: Self) -> Self::Output { - Size { - width: self.width - rhs.width, - height: self.height - rhs.height, - } - } -} - -impl Add for Size -where - T: Add + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn add(self, rhs: Self) -> Self::Output { - Size { - width: self.width + rhs.width, - height: self.height + rhs.height, - } - } -} - impl Mul for Size where T: Mul + Clone + Debug + Default + PartialEq, @@ -1245,6 +1214,15 @@ where } } +impl From> for Point { + fn from(size: Size) -> Self { + Self { + x: size.width, + y: size.height, + } + } +} + impl Bounds where T: Add + Clone + Debug + Default + PartialEq, @@ -3754,48 +3732,6 @@ impl Half for Rems { } } -/// Provides a trait for types that can negate their values. 
-pub trait Negate { - /// Returns the negation of the given value - fn negate(self) -> Self; -} - -impl Negate for i32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for f32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for DevicePixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for ScaledPixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Pixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Rems { - fn negate(self) -> Self { - Self(-self.0) - } -} - /// A trait for checking if a value is zero. /// /// This trait provides a method to determine if a value is considered to be zero. diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 7e2f0a981493cc12bca9f02e47a90ed6d6f21595..ff36dbce500b8e7472f3d7faa31d9e5cb17e087e 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -1,5 +1,5 @@ #![doc = include_str!("../README.md")] -#![deny(missing_docs)] +#![warn(missing_docs)] #![allow(clippy::type_complexity)] // Not useful, GPUI makes heavy use of callbacks #![allow(clippy::collapsible_else_if)] // False positives in platform specific code #![allow(unused_mut)] // False positives in platform specific code @@ -35,7 +35,7 @@ mod platform; pub mod prelude; /// Profiling utilities for task timing and thread performance tracking. 
pub mod profiler; -#[cfg(any(target_os = "windows", target_os = "linux"))] +#[cfg(any(target_os = "windows", target_os = "linux", target_family = "wasm"))] #[expect(missing_docs)] pub mod queue; mod scene; @@ -87,6 +87,7 @@ pub use executor::*; pub use geometry::*; pub use global::*; pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test}; +pub use gpui_util::arc_cow::ArcCow; pub use http_client; pub use input::*; pub use inspector::*; @@ -96,7 +97,7 @@ pub use keymap::*; pub use path_builder::*; pub use platform::*; pub use profiler::*; -#[cfg(any(target_os = "windows", target_os = "linux"))] +#[cfg(any(target_os = "windows", target_os = "linux", target_family = "wasm"))] pub use queue::{PriorityQueueReceiver, PriorityQueueSender}; pub use refineable::*; pub use scene::*; @@ -113,7 +114,7 @@ pub use taffy::{AvailableSpace, LayoutId}; #[cfg(any(test, feature = "test-support"))] pub use test::*; pub use text_system::*; -pub use util::{FutureExt, Timeout, arc_cow::ArcCow}; +pub use util::{FutureExt, Timeout}; pub use view::*; pub use window::*; diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 401c70ea488a8896151de047c04898f4f6b7e15a..a6714ff250f2f854c51d30bfea5e2e5911ce60ee 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -44,6 +44,7 @@ use image::RgbaImage; use image::codecs::gif::GifDecoder; use image::{AnimationDecoder as _, Frame}; use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; +use scheduler::Instant; pub use scheduler::RunnableMeta; use schemars::JsonSchema; use seahash::SeaHasher; @@ -53,7 +54,7 @@ use std::borrow::Cow; use std::hash::{Hash, Hasher}; use std::io::Cursor; use std::ops; -use std::time::{Duration, Instant}; +use std::time::Duration; use std::{ fmt::{self, Debug}, ops::Range, @@ -228,7 +229,7 @@ pub trait Platform: 'static { } /// A handle to a platform's display, e.g. a monitor or laptop screen. 
-pub trait PlatformDisplay: Send + Sync + Debug { +pub trait PlatformDisplay: Debug { /// Get the ID for this display fn id(&self) -> DisplayId; @@ -560,7 +561,7 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle { pub type RunnableVariant = Runnable; #[doc(hidden)] -pub type TimerResolutionGuard = util::Deferred>; +pub type TimerResolutionGuard = gpui_util::Deferred>; /// This type is public so that our test macro can generate and use it, but it should not /// be considered part of our public API. @@ -579,7 +580,7 @@ pub trait PlatformDispatcher: Send + Sync { } fn increase_timer_resolution(&self) -> TimerResolutionGuard { - util::defer(Box::new(|| {})) + gpui_util::defer(Box::new(|| {})) } #[cfg(any(test, feature = "test-support"))] @@ -827,7 +828,7 @@ impl From for AtlasKey { } #[expect(missing_docs)] -pub trait PlatformAtlas: Send + Sync { +pub trait PlatformAtlas { fn get_or_insert_with<'a>( &self, key: &AtlasKey, @@ -1235,7 +1236,7 @@ pub struct WindowOptions { ), allow(dead_code) )] -#[expect(missing_docs)] +#[allow(missing_docs)] pub struct WindowParams { pub bounds: Bounds, diff --git a/crates/gpui/src/platform/scap_screen_capture.rs b/crates/gpui/src/platform/scap_screen_capture.rs index 2c827bb0d80b330440f050b01c024faa700329ad..797e19ba23f715c372d76fd349577dde16a66f6f 100644 --- a/crates/gpui/src/platform/scap_screen_capture.rs +++ b/crates/gpui/src/platform/scap_screen_capture.rs @@ -126,7 +126,7 @@ fn start_default_target_screen_capture( ) { // Due to use of blocking APIs, a dedicated thread is used. 
std::thread::spawn(|| { - let start_result = util::maybe!({ + let start_result = gpui_util::maybe!({ let mut capturer = new_scap_capturer(None)?; capturer.start_capture(); let first_frame = capturer diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index e744802ceaa507c527e8af5b0be732074ded7f10..c40ec8f669d1e2e58f8af3bcf0fbd64fbddbe4d8 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -1,11 +1,12 @@ use crate::{PlatformDispatcher, Priority, RunnableVariant}; +use scheduler::Instant; use scheduler::{Clock, Scheduler, SessionId, TestScheduler, TestSchedulerConfig, Yield}; use std::{ sync::{ Arc, atomic::{AtomicUsize, Ordering}, }, - time::{Duration, Instant}, + time::Duration, }; /// TestDispatcher provides deterministic async execution for tests. @@ -47,6 +48,10 @@ impl TestDispatcher { self.session_id } + pub fn drain_tasks(&self) { + self.scheduler.drain_tasks(); + } + pub fn advance_clock(&self, by: Duration) { self.scheduler.advance_clock(by); } diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index 2c97b5fe087411768f3c039322be00a391e05dfe..feb3b162abe09d8cdef008aa9f794b046da22cc6 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -19,6 +19,7 @@ pub(crate) struct TestWindowState { pub(crate) title: Option, pub(crate) edited: bool, platform: Weak, + // TODO: Replace with `Rc` sprite_atlas: Arc, pub(crate) should_close_handler: Option bool>>, hit_test_window_control_callback: Option Option>>, diff --git a/crates/gpui/src/platform_scheduler.rs b/crates/gpui/src/platform_scheduler.rs index 2043d2a33387ab9aa0acda48321911a1503a6da8..900cd6041d38380f4d9cb3ff9b87a3605b0ebd78 100644 --- a/crates/gpui/src/platform_scheduler.rs +++ b/crates/gpui/src/platform_scheduler.rs @@ -2,7 +2,10 @@ use crate::{PlatformDispatcher, RunnableMeta}; use async_task::Runnable; use 
chrono::{DateTime, Utc}; use futures::channel::oneshot; +use scheduler::Instant; use scheduler::{Clock, Priority, Scheduler, SessionId, TestScheduler, Timer}; +#[cfg(not(target_family = "wasm"))] +use std::task::{Context, Poll}; use std::{ future::Future, pin::Pin, @@ -10,10 +13,8 @@ use std::{ Arc, atomic::{AtomicU16, Ordering}, }, - task::{Context, Poll}, - time::{Duration, Instant}, + time::Duration, }; -use waker_fn::waker_fn; /// A production implementation of [`Scheduler`] that wraps a [`PlatformDispatcher`]. /// @@ -43,37 +44,48 @@ impl Scheduler for PlatformScheduler { fn block( &self, _session_id: Option, - mut future: Pin<&mut dyn Future>, - timeout: Option, + #[cfg_attr(target_family = "wasm", allow(unused_mut))] mut future: Pin< + &mut dyn Future, + >, + #[cfg_attr(target_family = "wasm", allow(unused_variables))] timeout: Option, ) -> bool { - let deadline = timeout.map(|t| Instant::now() + t); - let parker = parking::Parker::new(); - let unparker = parker.unparker(); - let waker = waker_fn(move || { - unparker.unpark(); - }); - let mut cx = Context::from_waker(&waker); - if let Poll::Ready(()) = future.as_mut().poll(&mut cx) { - return true; + #[cfg(target_family = "wasm")] + { + let _ = (&future, &timeout); + panic!("Cannot block on wasm") } + #[cfg(not(target_family = "wasm"))] + { + use waker_fn::waker_fn; + let deadline = timeout.map(|t| Instant::now() + t); + let parker = parking::Parker::new(); + let unparker = parker.unparker(); + let waker = waker_fn(move || { + unparker.unpark(); + }); + let mut cx = Context::from_waker(&waker); + if let Poll::Ready(()) = future.as_mut().poll(&mut cx) { + return true; + } - let park_deadline = |deadline: Instant| { - // Timer expirations are only delivered every ~15.6 milliseconds by default on Windows. - // We increase the resolution during this wait so that short timeouts stay reasonably short. 
- let _timer_guard = self.dispatcher.increase_timer_resolution(); - parker.park_deadline(deadline) - }; - - loop { - match deadline { - Some(deadline) if !park_deadline(deadline) && deadline <= Instant::now() => { - return false; + let park_deadline = |deadline: Instant| { + // Timer expirations are only delivered every ~15.6 milliseconds by default on Windows. + // We increase the resolution during this wait so that short timeouts stay reasonably short. + let _timer_guard = self.dispatcher.increase_timer_resolution(); + parker.park_deadline(deadline) + }; + + loop { + match deadline { + Some(deadline) if !park_deadline(deadline) && deadline <= Instant::now() => { + return false; + } + Some(_) => (), + None => parker.park(), + } + if let Poll::Ready(()) = future.as_mut().poll(&mut cx) { + break true; } - Some(_) => (), - None => parker.park(), - } - if let Poll::Ready(()) = future.as_mut().poll(&mut cx) { - break true; } } } diff --git a/crates/gpui/src/profiler.rs b/crates/gpui/src/profiler.rs index 0863aa8cdaaa6bb7cbf593adea1fd4d12726acce..ccbc86e3fe35a095b2de9de159286250a24d7a05 100644 --- a/crates/gpui/src/profiler.rs +++ b/crates/gpui/src/profiler.rs @@ -1,3 +1,4 @@ +use scheduler::Instant; use std::{ cell::LazyCell, collections::HashMap, @@ -5,7 +6,6 @@ use std::{ hash::{DefaultHasher, Hash}, sync::Arc, thread::ThreadId, - time::Instant, }; use serde::{Deserialize, Serialize}; diff --git a/crates/gpui/src/queue.rs b/crates/gpui/src/queue.rs index 45712ba27e1c022a0be18056a9df7960ecac380f..6e7cf2445e3d6d1723721325e0e145c0c55d3236 100644 --- a/crates/gpui/src/queue.rs +++ b/crates/gpui/src/queue.rs @@ -41,6 +41,32 @@ impl PriorityQueueState { } let mut queues = self.queues.lock(); + Self::push(&mut queues, priority, item); + self.condvar.notify_one(); + Ok(()) + } + + fn spin_send(&self, priority: Priority, item: T) -> Result<(), SendError> { + if self + .receiver_count + .load(std::sync::atomic::Ordering::Relaxed) + == 0 + { + return Err(SendError(item)); + } + 
+ let mut queues = loop { + if let Some(guard) = self.queues.try_lock() { + break guard; + } + std::hint::spin_loop(); + }; + Self::push(&mut queues, priority, item); + self.condvar.notify_one(); + Ok(()) + } + + fn push(queues: &mut PriorityQueues, priority: Priority, item: T) { match priority { Priority::RealtimeAudio => unreachable!( "Realtime audio priority runs on a dedicated thread and is never queued" @@ -49,8 +75,6 @@ impl PriorityQueueState { Priority::Medium => queues.medium_priority.push_back(item), Priority::Low => queues.low_priority.push_back(item), }; - self.condvar.notify_one(); - Ok(()) } fn recv<'a>(&'a self) -> Result>, RecvError> { @@ -84,6 +108,28 @@ impl PriorityQueueState { Ok(Some(queues)) } } + + fn spin_try_recv<'a>( + &'a self, + ) -> Result>>, RecvError> { + let queues = loop { + if let Some(guard) = self.queues.try_lock() { + break guard; + } + std::hint::spin_loop(); + }; + + let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed); + if queues.is_empty() && sender_count == 0 { + return Err(crate::queue::RecvError); + } + + if queues.is_empty() { + Ok(None) + } else { + Ok(Some(queues)) + } + } } #[doc(hidden)] @@ -100,6 +146,11 @@ impl PriorityQueueSender { self.state.send(priority, item)?; Ok(()) } + + pub fn spin_send(&self, priority: Priority, item: T) -> Result<(), SendError> { + self.state.spin_send(priority, item)?; + Ok(()) + } } impl Drop for PriorityQueueSender { @@ -183,6 +234,44 @@ impl PriorityQueueReceiver { self.pop_inner(false) } + pub fn spin_try_pop(&mut self) -> Result, RecvError> { + use Priority as P; + + let Some(mut queues) = self.state.spin_try_recv()? 
else { + return Ok(None); + }; + + let high = P::High.weight() * !queues.high_priority.is_empty() as u32; + let medium = P::Medium.weight() * !queues.medium_priority.is_empty() as u32; + let low = P::Low.weight() * !queues.low_priority.is_empty() as u32; + let mut mass = high + medium + low; + + if !queues.high_priority.is_empty() { + let flip = self.rand.random_ratio(P::High.weight(), mass); + if flip { + return Ok(queues.high_priority.pop_front()); + } + mass -= P::High.weight(); + } + + if !queues.medium_priority.is_empty() { + let flip = self.rand.random_ratio(P::Medium.weight(), mass); + if flip { + return Ok(queues.medium_priority.pop_front()); + } + mass -= P::Medium.weight(); + } + + if !queues.low_priority.is_empty() { + let flip = self.rand.random_ratio(P::Low.weight(), mass); + if flip { + return Ok(queues.low_priority.pop_front()); + } + } + + Ok(None) + } + /// Pops an element from the priority queue blocking if necessary. /// /// This method is best suited if you only intend to pop one element, for better performance diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index 7b841da1f231f85073dd20d51769ed406d539ce8..7e0ffe017024cc7914885df9ea713a3ec3db820e 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -459,7 +459,7 @@ impl<'a> Iterator for BatchIterator<'a> { ), allow(dead_code) )] -#[expect(missing_docs)] +#[allow(missing_docs)] pub enum PrimitiveBatch { Shadows(Range), Quads(Range), @@ -711,7 +711,7 @@ impl From for Primitive { } #[derive(Clone, Debug)] -#[expect(missing_docs)] +#[allow(missing_docs)] pub struct PaintSurface { pub order: DrawOrder, pub bounds: Bounds, diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index 350184d350aec8c5995fe7d2f0856f1fe1cfea0f..4fd2f8c32112feb0199408825355f60d6554e19c 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -1,12 +1,12 @@ use derive_more::{Deref, DerefMut}; +use gpui_util::arc_cow::ArcCow; use 
schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::{ borrow::{Borrow, Cow}, sync::Arc, }; -use util::arc_cow::ArcCow; /// A shared string is an immutable string that can be cheaply cloned in GPUI /// tasks. Essentially an abstraction over an `Arc` and `&'static str`, diff --git a/crates/gpui/src/subscription.rs b/crates/gpui/src/subscription.rs index bd869f8d32cdfc81917fc2287b7dc62fac7d727d..cf44b68d2bcbf7ca7d02c4b9e956f15079f8bdb6 100644 --- a/crates/gpui/src/subscription.rs +++ b/crates/gpui/src/subscription.rs @@ -1,11 +1,11 @@ use collections::{BTreeMap, BTreeSet}; +use gpui_util::post_inc; use std::{ cell::{Cell, RefCell}, fmt::Debug, mem, rc::Rc, }; -use util::post_inc; pub(crate) struct SubscriberSet( Rc>>, diff --git a/crates/gpui/src/test.rs b/crates/gpui/src/test.rs index 9f8fd8b1984ff72a2d773454bf78956fa91f7ef0..9e76d97e97e941121417d872e8c6f596cf658e20 100644 --- a/crates/gpui/src/test.rs +++ b/crates/gpui/src/test.rs @@ -27,7 +27,6 @@ //! ``` use crate::{Entity, Subscription, TestAppContext, TestDispatcher}; use futures::StreamExt as _; -use smol::channel; use std::{ env, panic::{self, RefUnwindSafe}, @@ -136,7 +135,7 @@ fn calculate_seeds( /// A test struct for converting an observation callback into a stream. 
pub struct Observation { - rx: Pin>>, + rx: Pin>>, _subscription: Subscription, } @@ -153,10 +152,10 @@ impl futures::Stream for Observation { /// observe returns a stream of the change events from the given `Entity` pub fn observe(entity: &Entity, cx: &mut TestAppContext) -> Observation<()> { - let (tx, rx) = smol::channel::unbounded(); + let (tx, rx) = async_channel::unbounded(); let _subscription = cx.update(|cx| { cx.observe(entity, move |_, _| { - let _ = smol::block_on(tx.send(())); + let _ = pollster::block_on(tx.send(())); }) }); let rx = Box::pin(rx); diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index 1a1e3e7b5089c93d552898f2f491aaece854e8a7..8a7411e0ac29e86beff8f6a803c5f6a31518048e 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -7,8 +7,6 @@ use std::{ time::Duration, }; -pub use util::*; - /// A helper trait for building complex objects with imperative conditionals in a fluent style. pub trait FluentBuilder { /// Imperatively modify self with the given closure. 
diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 7301bdaa6ec2dfe78a7454482fa0aeca48f2fd90..3fcb911d2c58f8968bc6b0c66f26ed2de365dd53 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -26,11 +26,14 @@ use core_video::pixel_buffer::CVPixelBuffer; use derive_more::{Deref, DerefMut}; use futures::FutureExt; use futures::channel::oneshot; +use gpui_util::post_inc; +use gpui_util::{ResultExt, measure}; use itertools::FoldWhile::{Continue, Done}; use itertools::Itertools; use parking_lot::RwLock; use raw_window_handle::{HandleError, HasDisplayHandle, HasWindowHandle}; use refineable::Refineable; +use scheduler::Instant; use slotmap::SlotMap; use smallvec::SmallVec; use std::{ @@ -48,10 +51,8 @@ use std::{ Arc, Weak, atomic::{AtomicUsize, Ordering::SeqCst}, }, - time::{Duration, Instant}, + time::Duration, }; -use util::post_inc; -use util::{ResultExt, measure}; use uuid::Uuid; mod prompts; @@ -725,6 +726,7 @@ pub(crate) struct DeferredDraw { parent_node: DispatchNodeId, element_id_stack: SmallVec<[ElementId; 32]>, text_style_stack: Vec, + content_mask: Option>, rem_size: Pixels, element: Option, absolute_offset: Point, @@ -2428,15 +2430,18 @@ impl Window { .set_active_node(deferred_draw.parent_node); let prepaint_start = self.prepaint_index(); + let content_mask = deferred_draw.content_mask.clone(); if let Some(element) = deferred_draw.element.as_mut() { self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - window.with_absolute_element_offset( - deferred_draw.absolute_offset, - |window| { - element.prepaint(window, cx); - }, - ); + window.with_content_mask(content_mask, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + window.with_absolute_element_offset( + deferred_draw.absolute_offset, + |window| { + element.prepaint(window, cx); + }, + ); + }); }); }) } else { @@ -2468,10 +2473,13 @@ impl Window { 
.set_active_node(deferred_draw.parent_node); let paint_start = self.paint_index(); + let content_mask = deferred_draw.content_mask.clone(); if let Some(element) = deferred_draw.element.as_mut() { self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - element.paint(window, cx); + window.with_content_mask(content_mask, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + element.paint(window, cx); + }); }) }) } else { @@ -2535,6 +2543,7 @@ impl Window { parent_node: reused_subtree.refresh_node_id(deferred_draw.parent_node), element_id_stack: deferred_draw.element_id_stack.clone(), text_style_stack: deferred_draw.text_style_stack.clone(), + content_mask: deferred_draw.content_mask.clone(), rem_size: deferred_draw.rem_size, priority: deferred_draw.priority, element: None, @@ -3018,12 +3027,16 @@ impl Window { /// at a later time. The `priority` parameter determines the drawing order relative to other deferred elements, /// with higher values being drawn on top. /// + /// When `content_mask` is provided, the deferred element will be clipped to that region during + /// both prepaint and paint. When `None`, no additional clipping is applied. + /// /// This method should only be called as part of the prepaint phase of element drawing. 
pub fn defer_draw( &mut self, element: AnyElement, absolute_offset: Point, priority: usize, + content_mask: Option>, ) { self.invalidator.debug_assert_prepaint(); let parent_node = self.next_frame.dispatch_tree.active_node_id().unwrap(); @@ -3032,6 +3045,7 @@ impl Window { parent_node, element_id_stack: self.element_id_stack.clone(), text_style_stack: self.text_style_stack.clone(), + content_mask, rem_size: self.rem_size(), priority, element: Some(element), diff --git a/crates/gpui_linux/Cargo.toml b/crates/gpui_linux/Cargo.toml index e650765c912bd1fa7bfec72235f9d9acd07160a4..9078fa82c2884421c6cd11c6d3384645621b7e6f 100644 --- a/crates/gpui_linux/Cargo.toml +++ b/crates/gpui_linux/Cargo.toml @@ -18,8 +18,7 @@ wayland = [ "bitflags", "gpui_wgpu", "ashpd/wayland", - "cosmic-text", - "font-kit", + "calloop-wayland-source", "wayland-backend", "wayland-client", @@ -35,8 +34,7 @@ wayland = [ x11 = [ "gpui_wgpu", "ashpd", - "cosmic-text", - "font-kit", + "as-raw-xcb-connection", "x11rb", "xkbcommon", @@ -58,22 +56,24 @@ bytemuck = "1" collections.workspace = true futures.workspace = true gpui.workspace = true -gpui_wgpu = { workspace = true, optional = true } +gpui_wgpu = { workspace = true, optional = true, features = ["font-kit"] } http_client.workspace = true itertools.workspace = true libc.workspace = true log.workspace = true parking_lot.workspace = true pathfinder_geometry = "0.5" +pollster.workspace = true profiling.workspace = true smallvec.workspace = true smol.workspace = true strum.workspace = true +url.workspace = true util.workspace = true uuid.workspace = true # Always used -oo7 = { version = "0.5.0", default-features = false, features = [ +oo7 = { version = "0.6", default-features = false, features = [ "async-std", "native_crypto", ] } @@ -82,12 +82,7 @@ raw-window-handle = "0.6" # Used in both windowing options ashpd = { workspace = true, optional = true } -cosmic-text = { version = "0.17.0", optional = true } swash = { version = "0.2.6" } -# WARNING: If 
you change this, you must also publish a new version of zed-font-kit to crates.io -font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", features = [ - "source-fontconfig-dlopen", -], optional = true } bitflags = { workspace = true, optional = true } filedescriptor = { version = "0.8.2", optional = true } open = { version = "5.2.0", optional = true } @@ -126,6 +121,7 @@ x11rb = { version = "0.13.1", features = [ "cursor", "resource_manager", "sync", + "dri3", ], optional = true } # WARNING: If you change this, you must also publish a new version of zed-xim to crates.io xim = { git = "https://github.com/zed-industries/xim-rs.git", rev = "16f35a2c881b815a2b6cdfd6687988e84f8447d8", features = [ diff --git a/crates/gpui_linux/src/linux/headless/client.rs b/crates/gpui_linux/src/linux/headless/client.rs index 6dbdc556751b27d144feb4a40c916910bc6ff5f7..56cc9e8df008abcb0904c7178e5b333eaade1d84 100644 --- a/crates/gpui_linux/src/linux/headless/client.rs +++ b/crates/gpui_linux/src/linux/headless/client.rs @@ -64,6 +64,7 @@ impl LinuxClient for HeadlessClient { None } + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> futures::channel::oneshot::Receiver>>> diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index 5929533951738a474cdb76f3047162451de5ce1e..4cd89f35d1e757ca30acd33b1362d147a95b63ef 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -55,10 +55,12 @@ pub(crate) trait LinuxClient { fn display(&self, id: DisplayId) -> Option>; fn primary_display(&self) -> Option>; + #[cfg(feature = "screen-capture")] fn is_screen_capture_supported(&self) -> bool { false } + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> oneshot::Receiver>>> { @@ -122,7 +124,7 @@ impl LinuxCommon { let (main_sender, main_receiver) = 
PriorityQueueCalloopReceiver::new(); #[cfg(any(feature = "wayland", feature = "x11"))] - let text_system = Arc::new(crate::linux::CosmicTextSystem::new()); + let text_system = Arc::new(crate::linux::CosmicTextSystem::new("IBM Plex Sans")); #[cfg(not(any(feature = "wayland", feature = "x11")))] let text_system = Arc::new(gpui::NoopTextSystem::new()); @@ -227,17 +229,14 @@ impl Platform for LinuxPlatform

{ log::info!("Restarting process, using app path: {:?}", app_path); // Script to wait for the current process to exit and then restart the app. - let script = format!( - r#" - while kill -0 {pid} 2>/dev/null; do + // Pass dynamic values as positional parameters to avoid shell interpolation issues. + let script = r#" + while kill -0 "$0" 2>/dev/null; do sleep 0.1 done - {app_path} - "#, - pid = app_pid, - app_path = app_path.display() - ); + "$1" + "#; #[allow( clippy::disallowed_methods, @@ -247,6 +246,8 @@ impl Platform for LinuxPlatform

{ .arg("bash") .arg("-c") .arg(script) + .arg(&app_pid) + .arg(&app_path) .process_group(0) .spawn(); @@ -364,7 +365,8 @@ impl Platform for LinuxPlatform

{ response .uris() .iter() - .filter_map(|uri| uri.to_file_path().ok()) + .filter_map(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok()) + .filter_map(|uri: url::Url| uri.to_file_path().ok()) .collect::>(), )), Err(ashpd::Error::Response(_)) => Ok(None), @@ -426,7 +428,8 @@ impl Platform for LinuxPlatform

{ Ok(response) => Ok(response .uris() .first() - .and_then(|uri| uri.to_file_path().ok())), + .and_then(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok()) + .and_then(|uri: url::Url| uri.to_file_path().ok())), Err(ashpd::Error::Response(_)) => Ok(None), Err(e) => Err(e.into()), }; @@ -627,7 +630,7 @@ pub(super) fn open_uri_internal( uri: &str, activation_token: Option, ) { - if let Some(uri) = ashpd::url::Url::parse(uri).log_err() { + if let Some(uri) = ashpd::Uri::parse(uri).log_err() { executor .spawn(async move { match ashpd::desktop::open_uri::OpenFileRequest::default() @@ -1034,6 +1037,46 @@ pub(super) fn capslock_from_xkb(keymap_state: &State) -> gpui::Capslock { gpui::Capslock { on } } +/// Resolve a Linux `dev_t` to PCI vendor/device IDs via sysfs, returning a +/// [`CompositorGpuHint`] that the GPU adapter selection code can use to +/// prioritize the compositor's rendering device. +#[cfg(any(feature = "wayland", feature = "x11"))] +pub(super) fn compositor_gpu_hint_from_dev_t(dev: u64) -> Option { + fn dev_major(dev: u64) -> u32 { + ((dev >> 8) & 0xfff) as u32 | (((dev >> 32) & !0xfff) as u32) + } + + fn dev_minor(dev: u64) -> u32 { + (dev & 0xff) as u32 | (((dev >> 12) & !0xff) as u32) + } + + fn read_sysfs_hex_id(path: &str) -> Option { + let content = std::fs::read_to_string(path).ok()?; + let trimmed = content.trim().strip_prefix("0x").unwrap_or(content.trim()); + u32::from_str_radix(trimmed, 16).ok() + } + + let major = dev_major(dev); + let minor = dev_minor(dev); + + let vendor_path = format!("/sys/dev/char/{major}:{minor}/device/vendor"); + let device_path = format!("/sys/dev/char/{major}:{minor}/device/device"); + + let vendor_id = read_sysfs_hex_id(&vendor_path)?; + let device_id = read_sysfs_hex_id(&device_path)?; + + log::info!( + "Compositor GPU hint: vendor={:#06x}, device={:#06x} (from dev {major}:{minor})", + vendor_id, + device_id, + ); + + Some(gpui_wgpu::CompositorGpuHint { + vendor_id, + device_id, + }) +} + #[cfg(test)] mod tests 
{ use super::*; diff --git a/crates/gpui_linux/src/linux/text_system.rs b/crates/gpui_linux/src/linux/text_system.rs index af0298e5961e500fe9e01495905ba53a85f74f37..d6571021e0647453844b7564c8cdac32926bc6a6 100644 --- a/crates/gpui_linux/src/linux/text_system.rs +++ b/crates/gpui_linux/src/linux/text_system.rs @@ -1,538 +1 @@ -use anyhow::{Context as _, Ok, Result}; -use collections::HashMap; -use cosmic_text::{ - Attrs, AttrsList, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures, - FontSystem, ShapeBuffer, ShapeLine, -}; -use gpui::{ - Bounds, DevicePixels, Font, FontFeatures, FontId, FontMetrics, FontRun, GlyphId, LineLayout, - Pixels, PlatformTextSystem, RenderGlyphParams, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, - ShapedGlyph, ShapedRun, SharedString, Size, TextRenderingMode, point, size, -}; - -use itertools::Itertools; -use parking_lot::RwLock; -use smallvec::SmallVec; -use std::{borrow::Cow, sync::Arc}; -use swash::{ - scale::{Render, ScaleContext, Source, StrikeWith}, - zeno::{Format, Vector}, -}; - -pub(crate) struct CosmicTextSystem(RwLock); - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -struct FontKey { - family: SharedString, - features: FontFeatures, -} - -impl FontKey { - fn new(family: SharedString, features: FontFeatures) -> Self { - Self { family, features } - } -} - -struct CosmicTextSystemState { - font_system: FontSystem, - scratch: ShapeBuffer, - swash_scale_context: ScaleContext, - /// Contains all already loaded fonts, including all faces. Indexed by `FontId`. - loaded_fonts: Vec, - /// Caches the `FontId`s associated with a specific family to avoid iterating the font database - /// for every font face in a family. 
- font_ids_by_family_cache: HashMap>, -} - -struct LoadedFont { - font: Arc, - features: CosmicFontFeatures, - is_known_emoji_font: bool, -} - -impl CosmicTextSystem { - pub(crate) fn new() -> Self { - // todo(linux) make font loading non-blocking - let font_system = FontSystem::new(); - - Self(RwLock::new(CosmicTextSystemState { - font_system, - scratch: ShapeBuffer::default(), - swash_scale_context: ScaleContext::new(), - loaded_fonts: Vec::new(), - font_ids_by_family_cache: HashMap::default(), - })) - } -} - -impl Default for CosmicTextSystem { - fn default() -> Self { - Self::new() - } -} - -impl PlatformTextSystem for CosmicTextSystem { - fn add_fonts(&self, fonts: Vec>) -> Result<()> { - self.0.write().add_fonts(fonts) - } - - fn all_font_names(&self) -> Vec { - let mut result = self - .0 - .read() - .font_system - .db() - .faces() - .filter_map(|face| face.families.first().map(|family| family.0.clone())) - .collect_vec(); - result.sort(); - result.dedup(); - result - } - - fn font_id(&self, font: &Font) -> Result { - // todo(linux): Do we need to use CosmicText's Font APIs? Can we consolidate this to use font_kit? 
- let mut state = self.0.write(); - let key = FontKey::new(font.family.clone(), font.features.clone()); - let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&key) { - font_ids.as_slice() - } else { - let font_ids = state.load_family(&font.family, &font.features)?; - state.font_ids_by_family_cache.insert(key.clone(), font_ids); - state.font_ids_by_family_cache[&key].as_ref() - }; - - // todo(linux) ideally we would make fontdb's `find_best_match` pub instead of using font-kit here - let candidate_properties = candidates - .iter() - .map(|font_id| { - let database_id = state.loaded_font(*font_id).font.id(); - let face_info = state.font_system.db().face(database_id).expect(""); - face_info_into_properties(face_info) - }) - .collect::>(); - - let ix = - font_kit::matching::find_best_match(&candidate_properties, &font_into_properties(font)) - .context("requested font family contains no font matching the other parameters")?; - - Ok(candidates[ix]) - } - - fn font_metrics(&self, font_id: FontId) -> FontMetrics { - let metrics = self - .0 - .read() - .loaded_font(font_id) - .font - .as_swash() - .metrics(&[]); - - FontMetrics { - units_per_em: metrics.units_per_em as u32, - ascent: metrics.ascent, - descent: -metrics.descent, // todo(linux) confirm this is correct - line_gap: metrics.leading, - underline_position: metrics.underline_offset, - underline_thickness: metrics.stroke_size, - cap_height: metrics.cap_height, - x_height: metrics.x_height, - // todo(linux): Compute this correctly - bounding_box: Bounds { - origin: point(0.0, 0.0), - size: size(metrics.max_width, metrics.ascent + metrics.descent), - }, - } - } - - fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { - let lock = self.0.read(); - let glyph_metrics = lock.loaded_font(font_id).font.as_swash().glyph_metrics(&[]); - let glyph_id = glyph_id.0 as u16; - // todo(linux): Compute this correctly - // see 
https://github.com/servo/font-kit/blob/master/src/loaders/freetype.rs#L614-L620 - Ok(Bounds { - origin: point(0.0, 0.0), - size: size( - glyph_metrics.advance_width(glyph_id), - glyph_metrics.advance_height(glyph_id), - ), - }) - } - - fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { - self.0.read().advance(font_id, glyph_id) - } - - fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option { - self.0.read().glyph_for_char(font_id, ch) - } - - fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result> { - self.0.write().raster_bounds(params) - } - - fn rasterize_glyph( - &self, - params: &RenderGlyphParams, - raster_bounds: Bounds, - ) -> Result<(Size, Vec)> { - self.0.write().rasterize_glyph(params, raster_bounds) - } - - fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout { - self.0.write().layout_line(text, font_size, runs) - } - - fn recommended_rendering_mode( - &self, - _font_id: FontId, - _font_size: Pixels, - ) -> TextRenderingMode { - // Ideally, we'd use fontconfig to read the user preference. - TextRenderingMode::Subpixel - } -} - -impl CosmicTextSystemState { - fn loaded_font(&self, font_id: FontId) -> &LoadedFont { - &self.loaded_fonts[font_id.0] - } - - #[profiling::function] - fn add_fonts(&mut self, fonts: Vec>) -> Result<()> { - let db = self.font_system.db_mut(); - for bytes in fonts { - match bytes { - Cow::Borrowed(embedded_font) => { - db.load_font_data(embedded_font.to_vec()); - } - Cow::Owned(bytes) => { - db.load_font_data(bytes); - } - } - } - Ok(()) - } - - #[profiling::function] - fn load_family( - &mut self, - name: &str, - features: &FontFeatures, - ) -> Result> { - // TODO: Determine the proper system UI font. 
- let name = gpui::font_name_with_fallbacks(name, "IBM Plex Sans"); - - let families = self - .font_system - .db() - .faces() - .filter(|face| face.families.iter().any(|family| *name == family.0)) - .map(|face| (face.id, face.post_script_name.clone())) - .collect::>(); - - let mut loaded_font_ids = SmallVec::new(); - for (font_id, postscript_name) in families { - let font = self - .font_system - .get_font(font_id, cosmic_text::Weight::NORMAL) - .context("Could not load font")?; - - // HACK: To let the storybook run and render Windows caption icons. We should actually do better font fallback. - let allowed_bad_font_names = [ - "SegoeFluentIcons", // NOTE: Segoe fluent icons postscript name is inconsistent - "Segoe Fluent Icons", - ]; - - if font.as_swash().charmap().map('m') == 0 - && !allowed_bad_font_names.contains(&postscript_name.as_str()) - { - self.font_system.db_mut().remove_face(font.id()); - continue; - }; - - let font_id = FontId(self.loaded_fonts.len()); - loaded_font_ids.push(font_id); - self.loaded_fonts.push(LoadedFont { - font, - features: cosmic_font_features(features)?, - is_known_emoji_font: check_is_known_emoji_font(&postscript_name), - }); - } - - Ok(loaded_font_ids) - } - - fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { - let glyph_metrics = self.loaded_font(font_id).font.as_swash().glyph_metrics(&[]); - Ok(Size { - width: glyph_metrics.advance_width(glyph_id.0 as u16), - height: glyph_metrics.advance_height(glyph_id.0 as u16), - }) - } - - fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option { - let glyph_id = self.loaded_font(font_id).font.as_swash().charmap().map(ch); - if glyph_id == 0 { - None - } else { - Some(GlyphId(glyph_id.into())) - } - } - - fn raster_bounds(&mut self, params: &RenderGlyphParams) -> Result> { - let image = self.render_glyph_image(params)?; - Ok(Bounds { - origin: point(image.placement.left.into(), (-image.placement.top).into()), - size: size(image.placement.width.into(), 
image.placement.height.into()), - }) - } - - #[profiling::function] - fn rasterize_glyph( - &mut self, - params: &RenderGlyphParams, - glyph_bounds: Bounds, - ) -> Result<(Size, Vec)> { - if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 { - anyhow::bail!("glyph bounds are empty"); - } - - let mut image = self.render_glyph_image(params)?; - let bitmap_size = glyph_bounds.size; - match image.content { - swash::scale::image::Content::Color | swash::scale::image::Content::SubpixelMask => { - // Convert from RGBA to BGRA. - for pixel in image.data.chunks_exact_mut(4) { - pixel.swap(0, 2); - } - Ok((bitmap_size, image.data)) - } - swash::scale::image::Content::Mask => Ok((bitmap_size, image.data)), - } - } - - fn render_glyph_image( - &mut self, - params: &RenderGlyphParams, - ) -> Result { - let loaded_font = &self.loaded_fonts[params.font_id.0]; - let font_ref = loaded_font.font.as_swash(); - let pixel_size = f32::from(params.font_size); - - let subpixel_offset = Vector::new( - params.subpixel_variant.x as f32 / SUBPIXEL_VARIANTS_X as f32 / params.scale_factor, - params.subpixel_variant.y as f32 / SUBPIXEL_VARIANTS_Y as f32 / params.scale_factor, - ); - - let mut scaler = self - .swash_scale_context - .builder(font_ref) - .size(pixel_size * params.scale_factor) - .hint(true) - .build(); - - let sources: &[Source] = if params.is_emoji { - &[ - Source::ColorOutline(0), - Source::ColorBitmap(StrikeWith::BestFit), - Source::Outline, - ] - } else { - &[Source::Outline] - }; - - let mut renderer = Render::new(sources); - if params.subpixel_rendering { - // There seems to be a bug in Swash where the B and R values are swapped. 
- renderer - .format(Format::subpixel_bgra()) - .offset(subpixel_offset); - } else { - renderer.format(Format::Alpha).offset(subpixel_offset); - } - - let glyph_id: u16 = params.glyph_id.0.try_into()?; - renderer - .render(&mut scaler, glyph_id) - .with_context(|| format!("unable to render glyph via swash for {params:?}")) - } - - /// This is used when cosmic_text has chosen a fallback font instead of using the requested - /// font, typically to handle some unicode characters. When this happens, `loaded_fonts` may not - /// yet have an entry for this fallback font, and so one is added. - /// - /// Note that callers shouldn't use this `FontId` somewhere that will retrieve the corresponding - /// `LoadedFont.features`, as it will have an arbitrarily chosen or empty value. The only - /// current use of this field is for the *input* of `layout_line`, and so it's fine to use - /// `font_id_for_cosmic_id` when computing the *output* of `layout_line`. - fn font_id_for_cosmic_id(&mut self, id: cosmic_text::fontdb::ID) -> FontId { - if let Some(ix) = self - .loaded_fonts - .iter() - .position(|loaded_font| loaded_font.font.id() == id) - { - FontId(ix) - } else { - let font = self - .font_system - .get_font(id, cosmic_text::Weight::NORMAL) - .unwrap(); - let face = self.font_system.db().face(id).unwrap(); - - let font_id = FontId(self.loaded_fonts.len()); - self.loaded_fonts.push(LoadedFont { - font, - features: CosmicFontFeatures::new(), - is_known_emoji_font: check_is_known_emoji_font(&face.post_script_name), - }); - - font_id - } - } - - #[profiling::function] - fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout { - let mut attrs_list = AttrsList::new(&Attrs::new()); - let mut offs = 0; - for run in font_runs { - let loaded_font = self.loaded_font(run.font_id); - let font = self.font_system.db().face(loaded_font.font.id()).unwrap(); - - attrs_list.add_span( - offs..(offs + run.len), - &Attrs::new() - .metadata(run.font_id.0) - 
.family(Family::Name(&font.families.first().unwrap().0)) - .stretch(font.stretch) - .style(font.style) - .weight(font.weight) - .font_features(loaded_font.features.clone()), - ); - offs += run.len; - } - - let line = ShapeLine::new( - &mut self.font_system, - text, - &attrs_list, - cosmic_text::Shaping::Advanced, - 4, - ); - let mut layout_lines = Vec::with_capacity(1); - line.layout_to_buffer( - &mut self.scratch, - f32::from(font_size), - None, // We do our own wrapping - cosmic_text::Wrap::None, - None, - &mut layout_lines, - None, - cosmic_text::Hinting::Disabled, - ); - let layout = layout_lines.first().unwrap(); - - let mut runs: Vec = Vec::new(); - for glyph in &layout.glyphs { - let mut font_id = FontId(glyph.metadata); - let mut loaded_font = self.loaded_font(font_id); - if loaded_font.font.id() != glyph.font_id { - font_id = self.font_id_for_cosmic_id(glyph.font_id); - loaded_font = self.loaded_font(font_id); - } - let is_emoji = loaded_font.is_known_emoji_font; - - // HACK: Prevent crash caused by variation selectors. 
- if glyph.glyph_id == 3 && is_emoji { - continue; - } - - let shaped_glyph = ShapedGlyph { - id: GlyphId(glyph.glyph_id as u32), - position: point(glyph.x.into(), glyph.y.into()), - index: glyph.start, - is_emoji, - }; - - if let Some(last_run) = runs - .last_mut() - .filter(|last_run| last_run.font_id == font_id) - { - last_run.glyphs.push(shaped_glyph); - } else { - runs.push(ShapedRun { - font_id, - glyphs: vec![shaped_glyph], - }); - } - } - - LineLayout { - font_size, - width: layout.w.into(), - ascent: layout.max_ascent.into(), - descent: layout.max_descent.into(), - runs, - len: text.len(), - } - } -} - -fn cosmic_font_features(features: &FontFeatures) -> Result { - let mut result = CosmicFontFeatures::new(); - for feature in features.0.iter() { - let name_bytes: [u8; 4] = feature - .0 - .as_bytes() - .try_into() - .context("Incorrect feature flag format")?; - - let tag = cosmic_text::FeatureTag::new(&name_bytes); - - result.set(tag, feature.1); - } - Ok(result) -} - -fn font_into_properties(font: &gpui::Font) -> font_kit::properties::Properties { - font_kit::properties::Properties { - style: match font.style { - gpui::FontStyle::Normal => font_kit::properties::Style::Normal, - gpui::FontStyle::Italic => font_kit::properties::Style::Italic, - gpui::FontStyle::Oblique => font_kit::properties::Style::Oblique, - }, - weight: font_kit::properties::Weight(font.weight.0), - stretch: Default::default(), - } -} - -fn face_info_into_properties( - face_info: &cosmic_text::fontdb::FaceInfo, -) -> font_kit::properties::Properties { - font_kit::properties::Properties { - style: match face_info.style { - cosmic_text::Style::Normal => font_kit::properties::Style::Normal, - cosmic_text::Style::Italic => font_kit::properties::Style::Italic, - cosmic_text::Style::Oblique => font_kit::properties::Style::Oblique, - }, - // both libs use the same values for weight - weight: font_kit::properties::Weight(face_info.weight.0.into()), - stretch: match face_info.stretch { - 
cosmic_text::Stretch::Condensed => font_kit::properties::Stretch::CONDENSED, - cosmic_text::Stretch::Expanded => font_kit::properties::Stretch::EXPANDED, - cosmic_text::Stretch::ExtraCondensed => font_kit::properties::Stretch::EXTRA_CONDENSED, - cosmic_text::Stretch::ExtraExpanded => font_kit::properties::Stretch::EXTRA_EXPANDED, - cosmic_text::Stretch::Normal => font_kit::properties::Stretch::NORMAL, - cosmic_text::Stretch::SemiCondensed => font_kit::properties::Stretch::SEMI_CONDENSED, - cosmic_text::Stretch::SemiExpanded => font_kit::properties::Stretch::SEMI_EXPANDED, - cosmic_text::Stretch::UltraCondensed => font_kit::properties::Stretch::ULTRA_CONDENSED, - cosmic_text::Stretch::UltraExpanded => font_kit::properties::Stretch::ULTRA_EXPANDED, - }, - } -} - -fn check_is_known_emoji_font(postscript_name: &str) -> bool { - // TODO: Include other common emoji fonts - postscript_name == "NotoColorEmoji" -} +pub(crate) use gpui_wgpu::CosmicTextSystem; diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index a810a00af642c3a252a9a144b884837f82eac7e7..b49e269a72459d52c13c21b8d1a474ab310dbffd 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -95,7 +95,10 @@ use gpui::{ ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, WindowParams, point, profiler, px, size, }; -use gpui_wgpu::WgpuContext; +use gpui_wgpu::{CompositorGpuHint, WgpuContext}; +use wayland_protocols::wp::linux_dmabuf::zv1::client::{ + zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1, +}; /// Used to convert evdev scancode to xkb scancode const MIN_KEYCODE: u32 = 8; @@ -202,6 +205,7 @@ pub(crate) struct WaylandClientState { serial_tracker: SerialTracker, globals: Globals, pub gpu_context: Option, + pub compositor_gpu: Option, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, wl_keyboard: Option, @@ -515,6 +519,7 @@ impl WaylandClient { }) 
.unwrap(); + let compositor_gpu = detect_compositor_gpu(); let gpu_context = None; let seat = seat.unwrap(); @@ -571,6 +576,7 @@ impl WaylandClient { serial_tracker: SerialTracker::new(), globals, gpu_context, + compositor_gpu, wl_seat: seat, wl_pointer: None, wl_keyboard: None, @@ -715,10 +721,12 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), &mut state.gpu_context, + compositor_gpu, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, appearance, @@ -904,6 +912,70 @@ impl LinuxClient for WaylandClient { } } +struct DmabufProbeState { + device: Option, +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &wl_registry::WlRegistry, + _: wl_registry::Event, + _: &GlobalListContents, + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, + _: zwp_linux_dmabuf_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + state: &mut Self, + _: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1, + event: zwp_linux_dmabuf_feedback_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + if let zwp_linux_dmabuf_feedback_v1::Event::MainDevice { device } = event { + if let Ok(bytes) = <[u8; 8]>::try_from(device.as_slice()) { + state.device = Some(u64::from_ne_bytes(bytes)); + } + } + } +} + +fn detect_compositor_gpu() -> Option { + let connection = Connection::connect_to_env().ok()?; + let (globals, mut event_queue) = registry_queue_init::(&connection).ok()?; + let queue_handle = event_queue.handle(); + + let dmabuf: zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1 = + globals.bind(&queue_handle, 4..=4, ()).ok()?; + let feedback = 
dmabuf.get_default_feedback(&queue_handle, ()); + + let mut state = DmabufProbeState { device: None }; + + event_queue.roundtrip(&mut state).ok()?; + + feedback.destroy(); + dmabuf.destroy(); + + crate::linux::compositor_gpu_hint_from_dev_t(state.device?) +} + impl Dispatch for WaylandClientStatePtr { fn event( this: &mut Self, diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index c1006a816a3844db22ea8932177b0f0b2ff1c99f..4c0dbae530ee254f5232eaead187b93d10b0b8e3 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -34,7 +34,7 @@ use gpui::{ WindowDecorations, WindowKind, WindowParams, layer_shell::LayerShellNotSupportedError, px, size, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; #[derive(Default)] pub(crate) struct Callbacks { @@ -318,6 +318,7 @@ impl WaylandWindowState { client: WaylandClientStatePtr, globals: Globals, gpu_context: &mut Option, + compositor_gpu: Option, options: WindowParams, parent: Option, ) -> anyhow::Result { @@ -338,13 +339,19 @@ impl WaylandWindowState { }, transparent: true, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; if let WaylandSurfaceState::Xdg(ref xdg_state) = surface_state { if let Some(title) = options.titlebar.and_then(|titlebar| titlebar.title) { xdg_state.toplevel.set_title(title.to_string()); } + // Set max window size based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. 
+ let max_texture_size = renderer.max_texture_size() as i32; + xdg_state + .toplevel + .set_max_size(max_texture_size, max_texture_size); } Ok(Self { @@ -482,6 +489,7 @@ impl WaylandWindow { handle: AnyWindowHandle, globals: Globals, gpu_context: &mut Option, + compositor_gpu: Option, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, @@ -509,6 +517,7 @@ impl WaylandWindow { client, globals, gpu_context, + compositor_gpu, params, parent, )?)), @@ -640,19 +649,19 @@ impl WaylandWindowStatePtr { match mode { WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ServerSide) => { self.state.borrow_mut().decorations = WindowDecorations::Server; - if let Some(appearance_changed) = - self.callbacks.borrow_mut().appearance_changed.as_mut() - { - appearance_changed(); + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ClientSide) => { self.state.borrow_mut().decorations = WindowDecorations::Client; // Update background to be transparent - if let Some(appearance_changed) = - self.callbacks.borrow_mut().appearance_changed.as_mut() - { - appearance_changed(); + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } WEnum::Value(_) => { @@ -924,8 +933,10 @@ impl WaylandWindowStatePtr { (state.bounds.size, state.scale) }; - if let Some(ref mut fun) = self.callbacks.borrow_mut().resize { + let callback = self.callbacks.borrow_mut().resize.take(); + if let Some(mut fun) = callback { fun(size, scale); + self.callbacks.borrow_mut().resize = Some(fun); } { @@ -971,10 +982,13 @@ impl WaylandWindowStatePtr { if self.is_blocked() { return; } - if let Some(ref mut fun) = self.callbacks.borrow_mut().input - && !fun(input.clone()).propagate - { - return; + let 
callback = self.callbacks.borrow_mut().input.take(); + if let Some(mut fun) = callback { + let result = fun(input.clone()); + self.callbacks.borrow_mut().input = Some(fun); + if !result.propagate { + return; + } } if let PlatformInput::KeyDown(event) = input && event.keystroke.modifiers.is_subset_of(&Modifiers::shift()) @@ -991,23 +1005,28 @@ impl WaylandWindowStatePtr { pub fn set_focused(&self, focus: bool) { self.state.borrow_mut().active = focus; - if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change { + let callback = self.callbacks.borrow_mut().active_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().active_status_change = Some(fun); } } pub fn set_hovered(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().hover_status_change { + let callback = self.callbacks.borrow_mut().hover_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().hover_status_change = Some(fun); } } pub fn set_appearance(&mut self, appearance: WindowAppearance) { self.state.borrow_mut().appearance = appearance; - let mut callbacks = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = callbacks.appearance_changed { - (fun)() + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 7e3f67c9bf5fe3176f3badd9b33375ffdeb9dc19..3a970d9f72e1dc82215fc0d11297d222835df431 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -31,7 +31,7 @@ use x11rb::{ AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, EventMask, ModMask, Visibility, }, - protocol::{Event, randr, render, xinput, xkb, xproto}, + protocol::{Event, dri3, randr, 
render, xinput, xkb, xproto}, resource_manager::Database, wrapper::ConnectionExt as _, xcb_ffi::XCBConnection, @@ -64,7 +64,7 @@ use gpui::{ PlatformKeyboardLayout, PlatformWindow, Point, RequestFrameOptions, ScrollDelta, Size, TouchPhase, WindowParams, point, px, }; -use gpui_wgpu::WgpuContext; +use gpui_wgpu::{CompositorGpuHint, WgpuContext}; /// Value for DeviceId parameters which selects all devices. pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; @@ -178,6 +178,7 @@ pub struct X11ClientState { pub(crate) current_count: usize, pub(crate) gpu_context: Option, + pub(crate) compositor_gpu: Option, pub(crate) scale_factor: f32, @@ -430,6 +431,9 @@ impl X11Client { let clipboard = Clipboard::new().context("Failed to initialize clipboard")?; + let screen = &xcb_connection.setup().roots[x_root_index]; + let compositor_gpu = detect_compositor_gpu(&xcb_connection, screen); + let xcb_connection = Rc::new(xcb_connection); let ximc = X11rbClient::init(Rc::clone(&xcb_connection), x_root_index, None).ok(); @@ -490,6 +494,7 @@ impl X11Client { last_location: Point::new(px(0.0), px(0.0)), current_count: 0, gpu_context: None, + compositor_gpu, scale_factor, xkb_context, @@ -1514,11 +1519,13 @@ impl LinuxClient for X11Client { let atoms = state.atoms; let scale_factor = state.scale_factor; let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), &mut state.gpu_context, + compositor_gpu, params, &xcb_connection, client_side_decorations_supported, @@ -1976,7 +1983,30 @@ fn fp3232_to_f32(value: xinput::Fp3232) -> f32 { value.integral as f32 + value.frac as f32 / u32::MAX as f32 } -fn check_compositor_present(xcb_connection: &XCBConnection, root: u32) -> bool { +fn detect_compositor_gpu( + xcb_connection: &XCBConnection, + screen: &xproto::Screen, +) -> Option { + use std::os::fd::AsRawFd; + use 
std::os::unix::fs::MetadataExt; + + xcb_connection + .extension_information(dri3::X11_EXTENSION_NAME) + .ok()??; + + let reply = dri3::open(xcb_connection, screen.root, 0) + .ok()? + .reply() + .ok()?; + let fd = reply.device_fd; + + let path = format!("/proc/self/fd/{}", fd.as_raw_fd()); + let metadata = std::fs::metadata(&path).ok()?; + + crate::linux::compositor_gpu_hint_from_dev_t(metadata.rdev()) +} + +fn check_compositor_present(xcb_connection: &XCBConnection, root: xproto::Window) -> bool { // Method 1: Check for _NET_WM_CM_S{root} let atom_name = format!("_NET_WM_CM_S{}", root); let atom1 = get_reply( diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 8060e4c4457c6ef4575d86c4d975e3ead901f693..a7cdc67ecd908becd22f799767f482754527fa51 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -9,7 +9,7 @@ use gpui::{ Tiling, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowDecorations, WindowKind, WindowParams, px, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; use collections::FxHashSet; use raw_window_handle as rwh; @@ -319,12 +319,28 @@ impl rwh::HasDisplayHandle for RawWindow { impl rwh::HasWindowHandle for X11Window { fn window_handle(&self) -> Result, rwh::HandleError> { - unimplemented!() + let Some(non_zero) = NonZeroU32::new(self.0.x_window) else { + return Err(rwh::HandleError::Unavailable); + }; + let handle = rwh::XcbWindowHandle::new(non_zero); + Ok(unsafe { rwh::WindowHandle::borrow_raw(handle.into()) }) } } + impl rwh::HasDisplayHandle for X11Window { fn display_handle(&self) -> Result, rwh::HandleError> { - unimplemented!() + let connection = + as_raw_xcb_connection::AsRawXcbConnection::as_raw_xcb_connection(&*self.0.xcb) + as *mut _; + let Some(non_zero) = NonNull::new(connection) else { + return 
Err(rwh::HandleError::Unavailable); + }; + let screen_id = { + let state = self.0.state.borrow(); + u32::from(state.display.id()) as i32 + }; + let handle = rwh::XcbDisplayHandle::new(Some(non_zero), screen_id); + Ok(unsafe { rwh::DisplayHandle::borrow_raw(handle.into()) }) } } @@ -392,6 +408,7 @@ impl X11WindowState { client: X11ClientStatePtr, executor: ForegroundExecutor, gpu_context: &mut Option, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -497,21 +514,6 @@ impl X11WindowState { ), )?; - if let Some(size) = params.window_min_size { - let mut size_hints = WmSizeHints::new(); - let min_size = (f32::from(size.width) as i32, f32::from(size.height) as i32); - size_hints.min_size = Some(min_size); - check_reply( - || { - format!( - "X11 change of WM_SIZE_HINTS failed. min_size: {:?}", - min_size - ) - }, - size_hints.set_normal_hints(xcb, x_window), - )?; - } - let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?; if reply.x == 0 && reply.y == 0 { bounds.origin.x.0 += 2; @@ -694,9 +696,28 @@ impl X11WindowState { // too transparent: false, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; + // Set max window size hints based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. + let max_texture_size = renderer.max_texture_size(); + let mut size_hints = WmSizeHints::new(); + if let Some(size) = params.window_min_size { + size_hints.min_size = + Some((f32::from(size.width) as i32, f32::from(size.height) as i32)); + } + size_hints.max_size = Some((max_texture_size as i32, max_texture_size as i32)); + check_reply( + || { + format!( + "X11 change of WM_SIZE_HINTS failed. 
max_size: {:?}", + max_texture_size + ) + }, + size_hints.set_normal_hints(xcb, x_window), + )?; + let display = Rc::new(X11Display::new(xcb, scale_factor, x_screen_index)?); Ok(Self { @@ -799,6 +820,7 @@ impl X11Window { client: X11ClientStatePtr, executor: ForegroundExecutor, gpu_context: &mut Option, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -815,6 +837,7 @@ impl X11Window { client, executor, gpu_context, + compositor_gpu, params, xcb, client_side_decorations_supported, @@ -1045,9 +1068,10 @@ impl X11WindowStatePtr { } pub fn refresh(&self, request_frame_options: RequestFrameOptions) { - let mut cb = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = cb.request_frame { + let callback = self.callbacks.borrow_mut().request_frame.take(); + if let Some(mut fun) = callback { fun(request_frame_options); + self.callbacks.borrow_mut().request_frame = Some(fun); } } @@ -1055,10 +1079,13 @@ impl X11WindowStatePtr { if self.is_blocked() { return; } - if let Some(ref mut fun) = self.callbacks.borrow_mut().input - && !fun(input.clone()).propagate - { - return; + let callback = self.callbacks.borrow_mut().input.take(); + if let Some(mut fun) = callback { + let result = fun(input.clone()); + self.callbacks.borrow_mut().input = Some(fun); + if !result.propagate { + return; + } } if let PlatformInput::KeyDown(event) = input { // only allow shift modifier when inserting text @@ -1191,14 +1218,18 @@ impl X11WindowStatePtr { } pub fn set_active(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change { + let callback = self.callbacks.borrow_mut().active_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().active_status_change = Some(fun); } } pub fn set_hovered(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().hovered_status_change { + let callback = 
self.callbacks.borrow_mut().hovered_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().hovered_status_change = Some(fun); } } @@ -1209,9 +1240,10 @@ impl X11WindowStatePtr { state.renderer.update_transparency(is_transparent); state.appearance = appearance; drop(state); - let mut callbacks = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = callbacks.appearance_changed { - (fun)() + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } } diff --git a/crates/gpui_macos/Cargo.toml b/crates/gpui_macos/Cargo.toml index 4aedb1f4f1bed02e22f0dc6a881d60cc39ddd3a1..06e5d0e7321af523a249f19ec0d5ac50e2da5d3f 100644 --- a/crates/gpui_macos/Cargo.toml +++ b/crates/gpui_macos/Cargo.toml @@ -34,6 +34,7 @@ core-text = "21" core-video.workspace = true ctor.workspace = true derive_more.workspace = true +dispatch2 = "0.3.1" etagere = "0.2" # WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true } @@ -57,6 +58,5 @@ util.workspace = true uuid.workspace = true [target.'cfg(target_os = "macos")'.build-dependencies] -bindgen = "0.71" cbindgen = { version = "0.28.0", default-features = false } -gpui.workspace = true \ No newline at end of file +gpui.workspace = true diff --git a/crates/gpui_macos/build.rs b/crates/gpui_macos/build.rs index 32dfc571d257495c9c0a8cae54bc9fb567b51489..d5c1893f4ce18190a546aed1a708685cf66dc0e9 100644 --- a/crates/gpui_macos/build.rs +++ b/crates/gpui_macos/build.rs @@ -15,8 +15,6 @@ mod macos_build { use cbindgen::Config; pub fn run() { - generate_dispatch_bindings(); - let header_path = generate_shader_bindings(); #[cfg(feature = "runtime_shaders")] @@ -25,39 +23,6 
@@ mod macos_build { compile_metal_shaders(&header_path); } - fn generate_dispatch_bindings() { - println!("cargo:rustc-link-lib=framework=System"); - - let bindings = bindgen::Builder::default() - .header("src/dispatch.h") - .allowlist_var("_dispatch_main_q") - .allowlist_var("_dispatch_source_type_data_add") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW") - .allowlist_var("DISPATCH_TIME_NOW") - .allowlist_function("dispatch_get_global_queue") - .allowlist_function("dispatch_async_f") - .allowlist_function("dispatch_after_f") - .allowlist_function("dispatch_time") - .allowlist_function("dispatch_source_merge_data") - .allowlist_function("dispatch_source_create") - .allowlist_function("dispatch_source_set_event_handler_f") - .allowlist_function("dispatch_resume") - .allowlist_function("dispatch_suspend") - .allowlist_function("dispatch_source_cancel") - .allowlist_function("dispatch_set_context") - .parse_callbacks(Box::new(bindgen::CargoCallbacks::new())) - .layout_tests(false) - .generate() - .expect("unable to generate bindings"); - - let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); - bindings - .write_to_file(out_path.join("dispatch_sys.rs")) - .expect("couldn't write dispatch bindings"); - } - fn generate_shader_bindings() -> PathBuf { let output_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("scene.h"); diff --git a/crates/gpui_macos/src/dispatch.h b/crates/gpui_macos/src/dispatch.h deleted file mode 100644 index 54f3818738042b00938ad566ec0269fc0d80241d..0000000000000000000000000000000000000000 --- a/crates/gpui_macos/src/dispatch.h +++ /dev/null @@ -1,2 +0,0 @@ -#include -#include diff --git a/crates/gpui_macos/src/dispatcher.rs b/crates/gpui_macos/src/dispatcher.rs index 755016e44be84f585631fbf311ef499adfc69367..07638639e4bf5d3f002c1babfc213bc330e63dce 100644 --- a/crates/gpui_macos/src/dispatcher.rs +++ 
b/crates/gpui_macos/src/dispatcher.rs @@ -1,7 +1,4 @@ -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] -#![allow(non_snake_case)] - +use dispatch2::{DispatchQueue, DispatchQueueGlobalPriority, DispatchTime, GlobalQueueIdentifier}; use gpui::{ GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RunnableMeta, RunnableVariant, THREAD_TIMINGS, TaskTiming, ThreadTaskTimings, @@ -26,21 +23,10 @@ use objc::{ }; use std::{ ffi::c_void, - ptr::{NonNull, addr_of}, + ptr::NonNull, time::{Duration, Instant}, }; -/// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent -/// these pub items from leaking into public API. -pub(crate) mod dispatch_sys { - include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs")); -} - -use dispatch_sys::*; -pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t { - addr_of!(_dispatch_main_q) as *const _ as dispatch_queue_t -} - pub(crate) struct MacDispatcher; impl MacDispatcher { @@ -89,43 +75,32 @@ impl PlatformDispatcher for MacDispatcher { Priority::RealtimeAudio => { panic!("RealtimeAudio priority should use spawn_realtime, not dispatch") } - Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize, - Priority::Medium => DISPATCH_QUEUE_PRIORITY_DEFAULT as isize, - Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize, + Priority::High => DispatchQueueGlobalPriority::High, + Priority::Medium => DispatchQueueGlobalPriority::Default, + Priority::Low => DispatchQueueGlobalPriority::Low, }; unsafe { - dispatch_async_f( - dispatch_get_global_queue(queue_priority, 0), - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::global_queue(GlobalQueueIdentifier::Priority(queue_priority)) + .exec_async_f(context, trampoline); } } fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) { let context = runnable.into_raw().as_ptr() as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - context, - 
Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::main().exec_async_f(context, trampoline); } } fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { let context = runnable.into_raw().as_ptr() as *mut c_void; + let queue = DispatchQueue::global_queue(GlobalQueueIdentifier::Priority( + DispatchQueueGlobalPriority::High, + )); + let when = DispatchTime::NOW.time(duration.as_nanos() as i64); unsafe { - let queue = - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0); - let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64); - dispatch_after_f( - when, - queue, - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::exec_after_f(when, &queue, context, trampoline); } } diff --git a/crates/gpui_macos/src/display_link.rs b/crates/gpui_macos/src/display_link.rs index b086cc1b12182db661e5fa1cb82b671c7fd5b8bc..86e9b4072bab3cfb7cf5d0d69bc6ca29ad15cbb1 100644 --- a/crates/gpui_macos/src/display_link.rs +++ b/crates/gpui_macos/src/display_link.rs @@ -1,26 +1,21 @@ -use crate::{ - dispatch_get_main_queue, - dispatcher::dispatch_sys::{ - _dispatch_source_type_data_add, dispatch_resume, dispatch_set_context, - dispatch_source_cancel, dispatch_source_create, dispatch_source_merge_data, - dispatch_source_set_event_handler_f, dispatch_source_t, dispatch_suspend, - }, -}; use anyhow::Result; use core_graphics::display::CGDirectDisplayID; +use dispatch2::{ + _dispatch_source_type_data_add, DispatchObject, DispatchQueue, DispatchRetained, DispatchSource, +}; use std::ffi::c_void; use util::ResultExt; pub struct DisplayLink { display_link: Option, - frame_requests: dispatch_source_t, + frame_requests: DispatchRetained, } impl DisplayLink { pub fn new( display_id: CGDirectDisplayID, data: *mut c_void, - callback: unsafe extern "C" fn(*mut c_void), + callback: extern "C" fn(*mut c_void), ) -> Result { unsafe extern "C" fn display_link_callback( 
_display_link_out: *mut sys::CVDisplayLink, @@ -31,31 +26,27 @@ impl DisplayLink { frame_requests: *mut c_void, ) -> i32 { unsafe { - let frame_requests = frame_requests as dispatch_source_t; - dispatch_source_merge_data(frame_requests, 1); + let frame_requests = &*(frame_requests as *const DispatchSource); + frame_requests.merge_data(1); 0 } } unsafe { - let frame_requests = dispatch_source_create( - &_dispatch_source_type_data_add, + let frame_requests = DispatchSource::new( + &raw const _dispatch_source_type_data_add as *mut _, 0, 0, - dispatch_get_main_queue(), - ); - dispatch_set_context( - crate::dispatch_sys::dispatch_object_t { - _ds: frame_requests, - }, - data, + Some(DispatchQueue::main()), ); - dispatch_source_set_event_handler_f(frame_requests, Some(callback)); + frame_requests.set_context(data); + frame_requests.set_event_handler_f(callback); + frame_requests.resume(); let display_link = sys::DisplayLink::new( display_id, display_link_callback, - frame_requests as *mut c_void, + &*frame_requests as *const DispatchSource as *mut c_void, )?; Ok(Self { @@ -67,9 +58,6 @@ impl DisplayLink { pub fn start(&mut self) -> Result<()> { unsafe { - dispatch_resume(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); self.display_link.as_mut().unwrap().start()?; } Ok(()) @@ -77,9 +65,6 @@ impl DisplayLink { pub fn stop(&mut self) -> Result<()> { unsafe { - dispatch_suspend(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); self.display_link.as_mut().unwrap().stop()?; } Ok(()) @@ -97,9 +82,7 @@ impl Drop for DisplayLink { // // We might also want to upgrade to CADisplayLink, but that requires dropping old macOS support. 
std::mem::forget(self.display_link.take()); - unsafe { - dispatch_source_cancel(self.frame_requests); - } + self.frame_requests.cancel(); } } diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index c982f6da191f6b657e51238d8b6ac3d11f724149..d9c22cbea0354caff9bd5dd80d7ea98fa7e891de 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -24,6 +24,7 @@ use core_foundation::{ string::{CFString, CFStringRef}, }; use ctor::ctor; +use dispatch2::DispatchQueue; use futures::channel::oneshot; use gpui::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, @@ -493,13 +494,11 @@ impl Platform for MacPlatform { // this, we make quitting the application asynchronous so that we aren't holding borrows to // the app state on the stack when we actually terminate the app. - use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - unsafe { - dispatch_async_f(dispatch_get_main_queue(), ptr::null_mut(), Some(quit)); + DispatchQueue::main().exec_async_f(ptr::null_mut(), quit); } - unsafe extern "C" fn quit(_: *mut c_void) { + extern "C" fn quit(_: *mut c_void) { unsafe { let app = NSApplication::sharedApplication(nil); let _: () = msg_send![app, terminate: nil]; @@ -1261,19 +1260,13 @@ extern "C" fn on_thermal_state_change(this: &mut Object, _: Sel, _: id) { // Defer to the next run loop iteration to avoid re-entrant borrows of the App RefCell, // as NSNotificationCenter delivers this notification synchronously and it may fire while // the App is already borrowed (same pattern as quit() above). 
- use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - let platform = unsafe { get_mac_platform(this) }; let platform_ptr = platform as *const MacPlatform as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - platform_ptr, - Some(on_thermal_state_change), - ); + DispatchQueue::main().exec_async_f(platform_ptr, on_thermal_state_change); } - unsafe extern "C" fn on_thermal_state_change(context: *mut c_void) { + extern "C" fn on_thermal_state_change(context: *mut c_void) { let platform = unsafe { &*(context as *const MacPlatform) }; let mut lock = platform.0.lock(); if let Some(mut callback) = lock.on_thermal_state_change.take() { diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index 0d4edc28e8d6c79742760ab5ae0be12d944244db..456ee31ac3b03780e68267621d66435b1ceab4a9 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1,7 +1,6 @@ use crate::{ - BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, dispatch_get_main_queue, - dispatcher::dispatch_sys::dispatch_async_f, events::platform_input_from_native, ns_string, - renderer, + BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, events::platform_input_from_native, + ns_string, renderer, }; #[cfg(any(test, feature = "test-support"))] use anyhow::Result; @@ -22,6 +21,7 @@ use cocoa::{ NSUserDefaults, }, }; +use dispatch2::DispatchQueue; use gpui::{ AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, @@ -1050,34 +1050,32 @@ impl PlatformWindow for MacWindow { fn merge_all_windows(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, mergeAllWindows:nil]; + extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { + 
unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, mergeAllWindows:nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - Some(merge_windows_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, merge_windows_async); } } fn move_tab_to_new_window(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, moveTabToNewWindow:nil]; - let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { + unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, moveTabToNewWindow:nil]; + let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - Some(move_tab_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, move_tab_async); } } @@ -2252,7 +2250,7 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) { } } -unsafe extern "C" fn step(view: *mut c_void) { +extern "C" fn step(view: *mut c_void) { let view = view as id; let window_state = unsafe { get_window_state(&*view) }; let mut lock = window_state.lock(); @@ -2551,19 +2549,20 @@ fn send_file_drop_event( window_state: Arc>, file_drop_event: FileDropEvent, ) -> bool { - let mut window_state = window_state.lock(); - let window_event_callback = window_state.event_callback.as_mut(); - if let Some(callback) = window_event_callback { - let external_files_dragged = match file_drop_event { - FileDropEvent::Entered { .. } => Some(true), - FileDropEvent::Exited => Some(false), - _ => None, - }; + let external_files_dragged = match file_drop_event { + FileDropEvent::Entered { .. 
} => Some(true), + FileDropEvent::Exited => Some(false), + _ => None, + }; + let mut lock = window_state.lock(); + if let Some(mut callback) = lock.event_callback.take() { + drop(lock); callback(PlatformInput::FileDrop(file_drop_event)); - + let mut lock = window_state.lock(); + lock.event_callback = Some(callback); if let Some(external_files_dragged) = external_files_dragged { - window_state.external_files_dragged = external_files_dragged; + lock.external_files_dragged = external_files_dragged; } true } else { diff --git a/crates/gpui_macros/src/test.rs b/crates/gpui_macros/src/test.rs index 490ea07fee696908fad91410aa67ff124cdabe64..087e01740d2ba48392afee0ed7e31cf0779b180d 100644 --- a/crates/gpui_macros/src/test.rs +++ b/crates/gpui_macros/src/test.rs @@ -165,12 +165,13 @@ fn generate_test_function( dispatcher.clone(), Some(stringify!(#outer_fn_name)), ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -191,10 +192,17 @@ fn generate_test_function( &[#seeds], #max_retries, &mut |dispatcher, _seed| { - let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); + let exec = std::sync::Arc::new(dispatcher.clone()); #cx_vars - foreground_executor.block_test(#inner_fn_name(#inner_fn_args)); + gpui::ForegroundExecutor::new(exec.clone()).block_test(#inner_fn_name(#inner_fn_args)); + drop(exec); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once the runnable gets run again + // + // 
async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, #on_failure_fn_name ); @@ -229,13 +237,15 @@ Some(stringify!(#outer_fn_name)) ); let mut #cx_varname_lock = #cx_varname.app.borrow_mut(); + let _entity_refcounts = #cx_varname_lock.ref_counts_drop_handle(); )); inner_fn_args.extend(quote!(&mut #cx_varname_lock,)); cx_teardowns.extend(quote!( drop(#cx_varname_lock); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); continue; } @@ -246,12 +256,13 @@ dispatcher.clone(), Some(stringify!(#outer_fn_name)) ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -277,6 +288,12 @@ #cx_vars #inner_fn_name(#inner_fn_args); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once the runnable gets run again + // + // async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, #on_failure_fn_name, ); diff --git a/crates/gpui_platform/Cargo.toml b/crates/gpui_platform/Cargo.toml index cc1a41bc47d23cc42a48e7a71d0666eb86ff5da0..cfb47b1851b9e792c31fad9aca79b3671095b603 100644 --- a/crates/gpui_platform/Cargo.toml +++ b/crates/gpui_platform/Cargo.toml @@ 
-31,3 +31,7 @@ gpui_windows.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] gpui_linux.workspace = true + +[target.'cfg(target_family = "wasm")'.dependencies] +gpui_web.workspace = true +console_error_panic_hook = "0.1.7" diff --git a/crates/gpui_platform/src/gpui_platform.rs b/crates/gpui_platform/src/gpui_platform.rs index c8f45efd6d84f38159e4a659514c1ad7a8a8f364..7dac5498a652f7a7fe68b9f6d7ea23dffabdfb22 100644 --- a/crates/gpui_platform/src/gpui_platform.rs +++ b/crates/gpui_platform/src/gpui_platform.rs @@ -18,6 +18,20 @@ pub fn headless() -> gpui::Application { gpui::Application::with_platform(current_platform(true)) } +/// Unlike `application`, this function returns a single-threaded web application. +#[cfg(target_family = "wasm")] +pub fn single_threaded_web() -> gpui::Application { + gpui::Application::with_platform(Rc::new(gpui_web::WebPlatform::new(false))) +} + +/// Initializes panic hooks and logging for the web platform. +/// Call this before running the application in a wasm_bindgen entrypoint. +#[cfg(target_family = "wasm")] +pub fn web_init() { + console_error_panic_hook::set_once(); + gpui_web::init_logging(); +} + /// Returns the default [`Platform`] for the current OS. 
pub fn current_platform(headless: bool) -> Rc { #[cfg(target_os = "macos")] @@ -33,10 +47,16 @@ pub fn current_platform(headless: bool) -> Rc { ) } - #[cfg(not(any(target_os = "macos", target_os = "windows")))] + #[cfg(any(target_os = "linux", target_os = "freebsd"))] { gpui_linux::current_platform(headless) } + + #[cfg(target_family = "wasm")] + { + let _ = headless; + Rc::new(gpui_web::WebPlatform::new(true)) + } } #[cfg(all(test, target_os = "macos"))] diff --git a/crates/gpui_util/Cargo.toml b/crates/gpui_util/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..5f2267c7f34e0d23b0a83e886ed991137932fca6 --- /dev/null +++ b/crates/gpui_util/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "gpui_util" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[dependencies] +log.workspace = true +anyhow.workspace = true + +[lints] +workspace = true diff --git a/crates/gpui_util/LICENSE-APACHE b/crates/gpui_util/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/crates/gpui_util/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/util/src/arc_cow.rs b/crates/gpui_util/src/arc_cow.rs similarity index 100% rename from crates/util/src/arc_cow.rs rename to crates/gpui_util/src/arc_cow.rs diff --git a/crates/gpui_util/src/lib.rs b/crates/gpui_util/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..ae56c2ccd0ef8ff5ab339e1ddd017884abb168d4 --- /dev/null +++ b/crates/gpui_util/src/lib.rs @@ -0,0 +1,292 @@ +// FluentBuilder +// pub use gpui_util::{FutureExt, Timeout, arc_cow::ArcCow}; + +use std::{ + env, + ops::AddAssign, + panic::Location, + pin::Pin, + sync::OnceLock, + task::{Context, Poll}, + time::Instant, +}; + +pub mod arc_cow; + +pub fn post_inc + AddAssign + Copy>(value: &mut T) -> T { + let prev = *value; + *value += T::from(1); + prev +} + +pub fn 
measure(label: &str, f: impl FnOnce() -> R) -> R { + static ZED_MEASUREMENTS: OnceLock = OnceLock::new(); + let zed_measurements = ZED_MEASUREMENTS.get_or_init(|| { + env::var("ZED_MEASUREMENTS") + .map(|measurements| measurements == "1" || measurements == "true") + .unwrap_or(false) + }); + + if *zed_measurements { + let start = Instant::now(); + let result = f(); + let elapsed = start.elapsed(); + eprintln!("{}: {:?}", label, elapsed); + result + } else { + f() + } +} + +#[macro_export] +macro_rules! debug_panic { + ( $($fmt_arg:tt)* ) => { + if cfg!(debug_assertions) { + panic!( $($fmt_arg)* ); + } else { + let backtrace = std::backtrace::Backtrace::capture(); + log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace); + } + }; +} + +#[track_caller] +pub fn some_or_debug_panic(option: Option) -> Option { + #[cfg(debug_assertions)] + if option.is_none() { + panic!("Unexpected None"); + } + option +} + +/// Expands to an immediately-invoked function expression. Good for using the ? operator +/// in functions which do not return an Option or Result. +/// +/// Accepts a normal block, an async block, or an async move block. +#[macro_export] +macro_rules! maybe { + ($block:block) => { + (|| $block)() + }; + (async $block:block) => { + (async || $block)() + }; + (async move $block:block) => { + (async move || $block)() + }; +} +pub trait ResultExt { + type Ok; + + fn log_err(self) -> Option; + /// Assert that this result should never be an error in development or tests. 
+ fn debug_assert_ok(self, reason: &str) -> Self; + fn warn_on_err(self) -> Option; + fn log_with_level(self, level: log::Level) -> Option; + fn anyhow(self) -> anyhow::Result + where + E: Into; +} + +impl ResultExt for Result +where + E: std::fmt::Debug, +{ + type Ok = T; + + #[track_caller] + fn log_err(self) -> Option { + self.log_with_level(log::Level::Error) + } + + #[track_caller] + fn debug_assert_ok(self, reason: &str) -> Self { + if let Err(error) = &self { + debug_panic!("{reason} - {error:?}"); + } + self + } + + #[track_caller] + fn warn_on_err(self) -> Option { + self.log_with_level(log::Level::Warn) + } + + #[track_caller] + fn log_with_level(self, level: log::Level) -> Option { + match self { + Ok(value) => Some(value), + Err(error) => { + log_error_with_caller(*Location::caller(), error, level); + None + } + } + } + + fn anyhow(self) -> anyhow::Result + where + E: Into, + { + self.map_err(Into::into) + } +} + +fn log_error_with_caller(caller: core::panic::Location<'_>, error: E, level: log::Level) +where + E: std::fmt::Debug, +{ + #[cfg(not(windows))] + let file = caller.file(); + #[cfg(windows)] + let file = caller.file().replace('\\', "/"); + // In this codebase all crates reside in a `crates` directory, + // so discard the prefix up to that segment to find the crate name + let file = file.split_once("crates/"); + let target = file.as_ref().and_then(|(_, s)| s.split_once("/src/")); + + let module_path = target.map(|(krate, module)| { + if module.starts_with(krate) { + module.trim_end_matches(".rs").replace('/', "::") + } else { + krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::") + } + }); + let file = file.map(|(_, file)| format!("crates/{file}")); + log::logger().log( + &log::Record::builder() + .target(module_path.as_deref().unwrap_or("")) + .module_path(file.as_deref()) + .args(format_args!("{:?}", error)) + .file(Some(caller.file())) + .line(Some(caller.line())) + .level(level) + .build(), + ); +} + +pub fn 
log_err(error: &E) { + log_error_with_caller(*Location::caller(), error, log::Level::Error); +} + +pub trait TryFutureExt { + fn log_err(self) -> LogErrorFuture + where + Self: Sized; + + fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture + where + Self: Sized; + + fn warn_on_err(self) -> LogErrorFuture + where + Self: Sized; + fn unwrap(self) -> UnwrapFuture + where + Self: Sized; +} + +impl TryFutureExt for F +where + F: Future>, + E: std::fmt::Debug, +{ + #[track_caller] + fn log_err(self) -> LogErrorFuture + where + Self: Sized, + { + let location = Location::caller(); + LogErrorFuture(self, log::Level::Error, *location) + } + + fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture + where + Self: Sized, + { + LogErrorFuture(self, log::Level::Error, location) + } + + #[track_caller] + fn warn_on_err(self) -> LogErrorFuture + where + Self: Sized, + { + let location = Location::caller(); + LogErrorFuture(self, log::Level::Warn, *location) + } + + fn unwrap(self) -> UnwrapFuture + where + Self: Sized, + { + UnwrapFuture(self) + } +} + +#[must_use] +pub struct LogErrorFuture(F, log::Level, core::panic::Location<'static>); + +impl Future for LogErrorFuture +where + F: Future>, + E: std::fmt::Debug, +{ + type Output = Option; + + fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { + let level = self.1; + let location = self.2; + let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) }; + match inner.poll(cx) { + Poll::Ready(output) => Poll::Ready(match output { + Ok(output) => Some(output), + Err(error) => { + log_error_with_caller(location, error, level); + None + } + }), + Poll::Pending => Poll::Pending, + } + } +} + +pub struct UnwrapFuture(F); + +impl Future for UnwrapFuture +where + F: Future>, + E: std::fmt::Debug, +{ + type Output = T; + + fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { + let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) }; 
+ match inner.poll(cx) { + Poll::Ready(result) => Poll::Ready(result.unwrap()), + Poll::Pending => Poll::Pending, + } + } +} + +pub struct Deferred(Option); + +impl Deferred { + /// Drop without running the deferred function. + pub fn abort(mut self) { + self.0.take(); + } +} + +impl Drop for Deferred { + fn drop(&mut self) { + if let Some(f) = self.0.take() { + f() + } + } +} + +/// Run the given function when the returned value is dropped (unless it's cancelled). +#[must_use] +pub fn defer(f: F) -> Deferred { + Deferred(Some(f)) +} diff --git a/crates/gpui_web/Cargo.toml b/crates/gpui_web/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..dbb110597c7b850c28cde99ed573eab8264a18f7 --- /dev/null +++ b/crates/gpui_web/Cargo.toml @@ -0,0 +1,71 @@ +[package] +name = "gpui_web" +version = "0.1.0" +publish.workspace = true +edition.workspace = true +license = "Apache-2.0" +autoexamples = false + +[lints] +workspace = true + +[features] +default = ["multithreaded"] +multithreaded = ["dep:wasm_thread"] + +[lib] +path = "src/gpui_web.rs" + +[target.'cfg(target_family = "wasm")'.dependencies] +gpui.workspace = true +parking_lot = { workspace = true, features = ["nightly"] } +gpui_wgpu.workspace = true +http_client.workspace = true +anyhow.workspace = true +futures.workspace = true +log.workspace = true +smallvec.workspace = true +uuid.workspace = true +wasm-bindgen.workspace = true +wasm-bindgen-futures = "0.4" +web-time.workspace = true +console_error_panic_hook = "0.1.7" +js-sys = "0.3" +raw-window-handle = "0.6" +wasm_thread = { version = "0.3", features = ["es_modules"], optional = true } +web-sys = { version = "0.3", features = [ + "console", + "CssStyleDeclaration", + "DataTransfer", + "Document", + "DomRect", + "DragEvent", + "Element", + "EventTarget", + "File", + "FileList", + "HtmlCanvasElement", + "HtmlElement", + "HtmlInputElement", + "KeyboardEvent", + "MediaQueryList", + "MediaQueryListEvent", + "MouseEvent", + "Navigator", + 
"PointerEvent", + "ResizeObserver", + "ResizeObserverBoxOptions", + "ResizeObserverEntry", + "ResizeObserverSize", + "ResizeObserverOptions", + "Screen", + "Storage", + "VisualViewport", + "Headers", + "Request", + "RequestInit", + "RequestRedirect", + "Response", + "WheelEvent", + "Window", +] } diff --git a/crates/gpui_web/LICENSE-APACHE b/crates/gpui_web/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/crates/gpui_web/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/gpui_web/examples/hello_web/.cargo/config.toml b/crates/gpui_web/examples/hello_web/.cargo/config.toml new file mode 100644 index 0000000000000000000000000000000000000000..de4cb1a897249f8f2f8e27025ccb6d1a54f01c34 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/.cargo/config.toml @@ -0,0 +1,14 @@ +[target.wasm32-unknown-unknown] +rustflags = [ + "-C", "target-feature=+atomics,+bulk-memory,+mutable-globals", + "-C", "link-arg=--shared-memory", + "-C", "link-arg=--max-memory=1073741824", + "-C", "link-arg=--import-memory", + "-C", "link-arg=--export=__wasm_init_tls", + "-C", "link-arg=--export=__tls_size", + "-C", "link-arg=--export=__tls_align", + "-C", "link-arg=--export=__tls_base", +] + +[unstable] +build-std = ["std,panic_abort"] diff --git a/crates/gpui_web/examples/hello_web/.gitignore b/crates/gpui_web/examples/hello_web/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..2b1bc8d911302b667ef2cc4e795b80eb2dd5ba28 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/.gitignore @@ -0,0 +1,3 @@ +/dist +/target +Cargo.lock diff --git a/crates/gpui_web/examples/hello_web/Cargo.toml b/crates/gpui_web/examples/hello_web/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..7f8c5cf137778dc9f2d7cbab38c9dae91f2d9d15 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/Cargo.toml @@ -0,0 +1,16 @@ 
+[workspace] + +[package] +name = "hello_web" +version = "0.1.0" +edition = "2024" +publish = false + +[[bin]] +name = "hello_web" +path = "main.rs" + +[dependencies] +gpui = { path = "../../../gpui" } +gpui_platform = { path = "../../../gpui_platform" } +web-time = "1" diff --git a/crates/gpui_web/examples/hello_web/LICENSE-APACHE b/crates/gpui_web/examples/hello_web/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..15824831a24d7753b6e945b8190dd7d15413aed1 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/LICENSE-APACHE @@ -0,0 +1 @@ +../../../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/gpui_web/examples/hello_web/index.html b/crates/gpui_web/examples/hello_web/index.html new file mode 100644 index 0000000000000000000000000000000000000000..692f3edf06ef548e29cc6aaecc16d917f32ebdb6 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/index.html @@ -0,0 +1,31 @@ + + + + + + GPUI Web: hello_web + + + + + diff --git a/crates/gpui_web/examples/hello_web/main.rs b/crates/gpui_web/examples/hello_web/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..a6ff103e6475ca18ab1991403ba05089475e2e33 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/main.rs @@ -0,0 +1,422 @@ +use gpui::prelude::*; +use gpui::{ + App, Bounds, Context, ElementId, SharedString, Task, Window, WindowBounds, WindowOptions, div, + px, rgb, size, +}; + +// --------------------------------------------------------------------------- +// Prime counting (intentionally brute-force so it hammers the CPU) +// --------------------------------------------------------------------------- + +fn is_prime(n: u64) -> bool { + if n < 2 { + return false; + } + if n < 4 { + return true; + } + if n % 2 == 0 || n % 3 == 0 { + return false; + } + let mut i = 5; + while i * i <= n { + if n % i == 0 || n % (i + 2) == 0 { + return false; + } + i += 6; + } + true +} + +fn count_primes_in_range(start: u64, end: u64) -> u64 { + let mut count 
= 0; + for n in start..end { + if is_prime(n) { + count += 1; + } + } + count +} + +// --------------------------------------------------------------------------- +// App state +// --------------------------------------------------------------------------- + +const NUM_CHUNKS: u64 = 12; + +#[derive(Clone, Copy, PartialEq, Eq)] +enum Preset { + TenMillion, + FiftyMillion, + HundredMillion, +} + +impl Preset { + fn label(self) -> &'static str { + match self { + Preset::TenMillion => "10 M", + Preset::FiftyMillion => "50 M", + Preset::HundredMillion => "100 M", + } + } + + fn value(self) -> u64 { + match self { + Preset::TenMillion => 10_000_000, + Preset::FiftyMillion => 50_000_000, + Preset::HundredMillion => 100_000_000, + } + } + + const ALL: [Preset; 3] = [ + Preset::TenMillion, + Preset::FiftyMillion, + Preset::HundredMillion, + ]; +} + +struct ChunkResult { + count: u64, +} + +struct Run { + limit: u64, + chunks_done: u64, + chunk_results: Vec, + total: Option, + elapsed: Option, +} + +struct HelloWeb { + selected_preset: Preset, + current_run: Option, + history: Vec, + _tasks: Vec>, +} + +impl HelloWeb { + fn new(_cx: &mut Context) -> Self { + Self { + selected_preset: Preset::TenMillion, + current_run: None, + history: Vec::new(), + _tasks: Vec::new(), + } + } + + fn start_search(&mut self, cx: &mut Context) { + let limit = self.selected_preset.value(); + let chunk_size = limit / NUM_CHUNKS; + + self.current_run = Some(Run { + limit, + chunks_done: 0, + chunk_results: Vec::new(), + total: None, + elapsed: None, + }); + self._tasks.clear(); + cx.notify(); + + let start_time = web_time::Instant::now(); + + for i in 0..NUM_CHUNKS { + let range_start = i * chunk_size; + let range_end = if i == NUM_CHUNKS - 1 { + limit + } else { + range_start + chunk_size + }; + + let task = cx.spawn(async move |this, cx| { + let count = cx + .background_spawn(async move { count_primes_in_range(range_start, range_end) }) + .await; + + this.update(cx, |this, cx| { + if let 
Some(run) = &mut this.current_run { + run.chunk_results.push(ChunkResult { count }); + run.chunks_done += 1; + + if run.chunks_done == NUM_CHUNKS { + let total: u64 = run.chunk_results.iter().map(|r| r.count).sum(); + let elapsed_ms = start_time.elapsed().as_secs_f64() * 1000.0; + run.total = Some(total); + run.elapsed = Some(elapsed_ms); + this.history.push( + format!( + "π({}) = {} ({:.0} ms, {} chunks)", + format_number(run.limit), + format_number(total), + elapsed_ms, + NUM_CHUNKS, + ) + .into(), + ); + } + cx.notify(); + } + }) + .ok(); + }); + + self._tasks.push(task); + } + } +} + +fn format_number(n: u64) -> String { + let s = n.to_string(); + let mut result = String::new(); + for (i, ch) in s.chars().rev().enumerate() { + if i > 0 && i % 3 == 0 { + result.push(','); + } + result.push(ch); + } + result.chars().rev().collect() +} + +// --------------------------------------------------------------------------- +// Render +// --------------------------------------------------------------------------- + +const BG_BASE: u32 = 0x1e1e2e; +const BG_SURFACE: u32 = 0x313244; +const BG_OVERLAY: u32 = 0x45475a; +const TEXT_PRIMARY: u32 = 0xcdd6f4; +const TEXT_SECONDARY: u32 = 0xa6adc8; +const TEXT_DIM: u32 = 0x6c7086; +const ACCENT_YELLOW: u32 = 0xf9e2af; +const ACCENT_GREEN: u32 = 0xa6e3a1; +const ACCENT_BLUE: u32 = 0x89b4fa; +const ACCENT_MAUVE: u32 = 0xcba6f7; + +impl Render for HelloWeb { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_running = self.current_run.as_ref().is_some_and(|r| r.total.is_none()); + + // -- Preset buttons -- + let preset_row = Preset::ALL.iter().enumerate().fold( + div().flex().flex_row().gap_2(), + |row, (index, &preset)| { + let is_selected = preset == self.selected_preset; + let (bg, text_color) = if is_selected { + (ACCENT_BLUE, BG_BASE) + } else { + (BG_OVERLAY, TEXT_SECONDARY) + }; + row.child( + div() + .id(ElementId::NamedInteger("preset".into(), index as u64)) + .px_3() + .py_1() + 
.rounded_md() + .bg(rgb(bg)) + .text_color(rgb(text_color)) + .text_sm() + .cursor_pointer() + .when(!is_running, |this| { + this.on_click(cx.listener(move |this, _event, _window, _cx| { + this.selected_preset = preset; + })) + }) + .child(preset.label()), + ) + }, + ); + + // -- Go button -- + let (go_bg, go_text, go_label) = if is_running { + (BG_OVERLAY, TEXT_DIM, "Running…") + } else { + (ACCENT_GREEN, BG_BASE, "Count Primes") + }; + let go_button = div() + .id("go") + .px_4() + .py(px(6.)) + .rounded_md() + .bg(rgb(go_bg)) + .text_color(rgb(go_text)) + .cursor_pointer() + .when(!is_running, |this| { + this.on_click(cx.listener(|this, _event, _window, cx| { + this.start_search(cx); + })) + }) + .child(go_label); + + // -- Progress / result area -- + let status_area = if let Some(run) = &self.current_run { + let progress_fraction = run.chunks_done as f32 / NUM_CHUNKS as f32; + let progress_pct = (progress_fraction * 100.0) as u32; + + let status_text: SharedString = if let Some(total) = run.total { + format!( + "Found {} primes below {} in {:.0} ms", + format_number(total), + format_number(run.limit), + run.elapsed.unwrap_or(0.0), + ) + .into() + } else { + format!( + "Searching up to {} … {}/{} chunks ({}%)", + format_number(run.limit), + run.chunks_done, + NUM_CHUNKS, + progress_pct, + ) + .into() + }; + + let bar_color = if run.total.is_some() { + ACCENT_GREEN + } else { + ACCENT_BLUE + }; + + let chunk_dots = + (0..NUM_CHUNKS as usize).fold(div().flex().flex_row().gap_1().mt_2(), |row, i| { + let done = i < run.chunks_done as usize; + let color = if done { ACCENT_MAUVE } else { BG_OVERLAY }; + row.child(div().size(px(10.)).rounded_sm().bg(rgb(color))) + }); + + div() + .flex() + .flex_col() + .w_full() + .gap_2() + .child(div().text_color(rgb(TEXT_PRIMARY)).child(status_text)) + .child( + div() + .w_full() + .h(px(8.)) + .rounded_sm() + .bg(rgb(BG_OVERLAY)) + .child( + div() + .h_full() + .rounded_sm() + .bg(rgb(bar_color)) + 
.w(gpui::relative(progress_fraction)), + ), + ) + .child(chunk_dots) + } else { + div().flex().flex_col().w_full().child( + div() + .text_color(rgb(TEXT_DIM)) + .child("Select a range and press Count Primes to begin."), + ) + }; + + // -- History log -- + let history_section = if self.history.is_empty() { + div() + } else { + self.history + .iter() + .rev() + .fold(div().flex().flex_col().gap_1(), |col, entry| { + col.child( + div() + .text_sm() + .text_color(rgb(TEXT_SECONDARY)) + .child(entry.clone()), + ) + }) + }; + + // -- Layout -- + div() + .flex() + .flex_col() + .size_full() + .bg(rgb(BG_BASE)) + .justify_center() + .items_center() + .gap_4() + .p_4() + // Title + .child( + div() + .text_xl() + .text_color(rgb(TEXT_PRIMARY)) + .child("Prime Sieve — GPUI Web"), + ) + .child(div().text_sm().text_color(rgb(TEXT_DIM)).child(format!( + "Background threads: {} · Chunks per run: {}", + std::thread::available_parallelism().map_or(2, |n| n.get().max(2)), + NUM_CHUNKS, + ))) + // Controls + .child( + div() + .flex() + .flex_col() + .items_center() + .gap_3() + .p_4() + .w(px(500.)) + .rounded_lg() + .bg(rgb(BG_SURFACE)) + .child( + div() + .text_sm() + .text_color(rgb(ACCENT_YELLOW)) + .child("Count primes below:"), + ) + .child(preset_row) + .child(go_button), + ) + // Status + .child( + div() + .flex() + .flex_col() + .w(px(500.)) + .p_4() + .rounded_lg() + .bg(rgb(BG_SURFACE)) + .child(status_area), + ) + // History + .when(!self.history.is_empty(), |this| { + this.child( + div() + .flex() + .flex_col() + .w(px(500.)) + .p_4() + .rounded_lg() + .bg(rgb(BG_SURFACE)) + .gap_2() + .child(div().text_sm().text_color(rgb(TEXT_DIM)).child("History")) + .child(history_section), + ) + }) + } +} + +// --------------------------------------------------------------------------- +// Entry point +// --------------------------------------------------------------------------- + +fn main() { + gpui_platform::web_init(); + gpui_platform::application().run(|cx: &mut App| { + let 
bounds = Bounds::centered(None, size(px(640.), px(560.)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |_, cx| cx.new(HelloWeb::new), + ) + .expect("failed to open window"); + cx.activate(true); + }); +} diff --git a/crates/gpui_web/examples/hello_web/rust-toolchain.toml b/crates/gpui_web/examples/hello_web/rust-toolchain.toml new file mode 100644 index 0000000000000000000000000000000000000000..8ea6df3d47dc12d2daa3d1b25431f2d636698dc4 --- /dev/null +++ b/crates/gpui_web/examples/hello_web/rust-toolchain.toml @@ -0,0 +1,4 @@ +[toolchain] +channel = "nightly" +targets = ["wasm32-unknown-unknown"] +components = ["rust-src", "rustfmt", "clippy"] diff --git a/crates/gpui_web/examples/hello_web/trunk.toml b/crates/gpui_web/examples/hello_web/trunk.toml new file mode 100644 index 0000000000000000000000000000000000000000..5ef787c270984bd9dfc4aa28daa25a4b2945126b --- /dev/null +++ b/crates/gpui_web/examples/hello_web/trunk.toml @@ -0,0 +1,7 @@ +[serve] +addresses = ["127.0.0.1"] +port = 8080 +open = true + +# Headers required for WebGPU / SharedArrayBuffer support. 
+headers = { "Cross-Origin-Embedder-Policy" = "require-corp", "Cross-Origin-Opener-Policy" = "same-origin" } diff --git a/crates/gpui_web/src/dispatcher.rs b/crates/gpui_web/src/dispatcher.rs new file mode 100644 index 0000000000000000000000000000000000000000..d9419fb35353cfadd809b0bbc1cb9e7dbf124cda --- /dev/null +++ b/crates/gpui_web/src/dispatcher.rs @@ -0,0 +1,345 @@ +use gpui::{ + PlatformDispatcher, Priority, PriorityQueueReceiver, PriorityQueueSender, RunnableVariant, + ThreadTaskTimings, +}; +use std::sync::Arc; +use std::sync::atomic::AtomicI32; +use std::time::Duration; +use wasm_bindgen::prelude::*; +use web_time::Instant; + +#[cfg(feature = "multithreaded")] +const MIN_BACKGROUND_THREADS: usize = 2; + +#[cfg(feature = "multithreaded")] +fn shared_memory_supported() -> bool { + let global = js_sys::global(); + let has_shared_array_buffer = + js_sys::Reflect::has(&global, &JsValue::from_str("SharedArrayBuffer")).unwrap_or(false); + let has_atomics = js_sys::Reflect::has(&global, &JsValue::from_str("Atomics")).unwrap_or(false); + let memory = js_sys::WebAssembly::Memory::from(wasm_bindgen::memory()); + let buffer = memory.buffer(); + let is_shared_buffer = buffer.is_instance_of::(); + has_shared_array_buffer && has_atomics && is_shared_buffer +} + +enum MainThreadItem { + Runnable(RunnableVariant), + Delayed { + runnable: RunnableVariant, + millis: i32, + }, + // TODO-Wasm: Shouldn't these run on their own dedicated thread? 
+ RealtimeFunction(Box), +} + +struct MainThreadMailbox { + sender: PriorityQueueSender, + receiver: parking_lot::Mutex>, + signal: AtomicI32, +} + +impl MainThreadMailbox { + fn new() -> Self { + let (sender, receiver) = PriorityQueueReceiver::new(); + Self { + sender, + receiver: parking_lot::Mutex::new(receiver), + signal: AtomicI32::new(0), + } + } + + fn post(&self, priority: Priority, item: MainThreadItem) { + if self.sender.spin_send(priority, item).is_err() { + log::error!("MainThreadMailbox::send failed: receiver disconnected"); + } + + // TODO-Wasm: Verify this lock-free protocol + let view = self.signal_view(); + js_sys::Atomics::store(&view, 0, 1).ok(); + js_sys::Atomics::notify(&view, 0).ok(); + } + + fn drain(&self, window: &web_sys::Window) { + let mut receiver = self.receiver.lock(); + loop { + // We need these `spin` variants because we can't acquire a lock on the main thread. + // TODO-WASM: Should we do something different? + match receiver.spin_try_pop() { + Ok(Some(item)) => execute_on_main_thread(window, item), + Ok(None) => break, + Err(_) => break, + } + } + } + + fn signal_view(&self) -> js_sys::Int32Array { + let byte_offset = self.signal.as_ptr() as u32; + let memory = js_sys::WebAssembly::Memory::from(wasm_bindgen::memory()); + js_sys::Int32Array::new_with_byte_offset_and_length(&memory.buffer(), byte_offset, 1) + } + + fn run_waker_loop(self: &Arc, window: web_sys::Window) { + if !shared_memory_supported() { + log::warn!("SharedArrayBuffer not available; main thread mailbox waker loop disabled"); + return; + } + + let mailbox = Arc::clone(self); + wasm_bindgen_futures::spawn_local(async move { + let view = mailbox.signal_view(); + loop { + js_sys::Atomics::store(&view, 0, 0).expect("Atomics.store failed"); + + let result = match js_sys::Atomics::wait_async(&view, 0, 0) { + Ok(result) => result, + Err(error) => { + log::error!("Atomics.waitAsync failed: {error:?}"); + break; + } + }; + + let is_async = js_sys::Reflect::get(&result, 
&JsValue::from_str("async")) + .ok() + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + if !is_async { + log::error!("Atomics.waitAsync returned synchronously; waker loop exiting"); + break; + } + + let promise: js_sys::Promise = + js_sys::Reflect::get(&result, &JsValue::from_str("value")) + .expect("waitAsync result missing 'value'") + .unchecked_into(); + + let _ = wasm_bindgen_futures::JsFuture::from(promise).await; + + mailbox.drain(&window); + } + }); + } +} + +pub struct WebDispatcher { + main_thread_id: std::thread::ThreadId, + browser_window: web_sys::Window, + background_sender: PriorityQueueSender, + main_thread_mailbox: Arc, + supports_threads: bool, + #[cfg(feature = "multithreaded")] + _background_threads: Vec>, +} + +// Safety: `web_sys::Window` is only accessed from the main thread +// All other fields are `Send + Sync` by construction. +unsafe impl Send for WebDispatcher {} +unsafe impl Sync for WebDispatcher {} + +impl WebDispatcher { + pub fn new(browser_window: web_sys::Window, allow_threads: bool) -> Self { + #[cfg(feature = "multithreaded")] + let (background_sender, background_receiver) = PriorityQueueReceiver::new(); + #[cfg(not(feature = "multithreaded"))] + let (background_sender, _) = PriorityQueueReceiver::new(); + + let main_thread_mailbox = Arc::new(MainThreadMailbox::new()); + + #[cfg(feature = "multithreaded")] + let supports_threads = allow_threads && shared_memory_supported(); + #[cfg(not(feature = "multithreaded"))] + let supports_threads = false; + + if supports_threads { + main_thread_mailbox.run_waker_loop(browser_window.clone()); + } else { + log::warn!( + "SharedArrayBuffer not available; falling back to single-threaded dispatcher" + ); + } + + #[cfg(feature = "multithreaded")] + let background_threads = if supports_threads { + let thread_count = browser_window + .navigator() + .hardware_concurrency() + .max(MIN_BACKGROUND_THREADS as f64) as usize; + + // TODO-Wasm: Is it bad to have web workers blocking for a long time 
like this? + (0..thread_count) + .map(|i| { + let mut receiver = background_receiver.clone(); + wasm_thread::Builder::new() + .name(format!("background-worker-{i}")) + .spawn(move || { + loop { + let runnable: RunnableVariant = match receiver.pop() { + Ok(runnable) => runnable, + Err(_) => { + log::info!( + "background-worker-{i}: channel disconnected, exiting" + ); + break; + } + }; + + if runnable.metadata().is_closed() { + continue; + } + + runnable.run(); + } + }) + .expect("failed to spawn background worker thread") + }) + .collect::>() + } else { + Vec::new() + }; + + Self { + main_thread_id: std::thread::current().id(), + browser_window, + background_sender, + main_thread_mailbox, + supports_threads, + #[cfg(feature = "multithreaded")] + _background_threads: background_threads, + } + } + + fn on_main_thread(&self) -> bool { + std::thread::current().id() == self.main_thread_id + } +} + +impl PlatformDispatcher for WebDispatcher { + fn get_all_timings(&self) -> Vec { + // TODO-Wasm: should we panic here? 
+ Vec::new() + } + + fn get_current_thread_timings(&self) -> ThreadTaskTimings { + ThreadTaskTimings { + thread_name: None, + thread_id: std::thread::current().id(), + timings: Vec::new(), + total_pushed: 0, + } + } + + fn is_main_thread(&self) -> bool { + self.on_main_thread() + } + + fn dispatch(&self, runnable: RunnableVariant, priority: Priority) { + if !self.supports_threads { + self.dispatch_on_main_thread(runnable, priority); + return; + } + + let result = if self.on_main_thread() { + self.background_sender.spin_send(priority, runnable) + } else { + self.background_sender.send(priority, runnable) + }; + + if let Err(error) = result { + log::error!("dispatch: failed to send to background queue: {error:?}"); + } + } + + fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority) { + if self.on_main_thread() { + schedule_runnable(&self.browser_window, runnable, priority); + } else { + self.main_thread_mailbox + .post(priority, MainThreadItem::Runnable(runnable)); + } + } + + fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { + let millis = duration.as_millis().min(i32::MAX as u128) as i32; + if self.on_main_thread() { + let callback = Closure::once_into_js(move || { + if !runnable.metadata().is_closed() { + runnable.run(); + } + }); + self.browser_window + .set_timeout_with_callback_and_timeout_and_arguments_0( + callback.unchecked_ref(), + millis, + ) + .ok(); + } else { + self.main_thread_mailbox + .post(Priority::High, MainThreadItem::Delayed { runnable, millis }); + } + } + + fn spawn_realtime(&self, function: Box) { + if self.on_main_thread() { + let callback = Closure::once_into_js(move || { + function(); + }); + self.browser_window + .queue_microtask(callback.unchecked_ref()); + } else { + self.main_thread_mailbox + .post(Priority::High, MainThreadItem::RealtimeFunction(function)); + } + } + + fn now(&self) -> Instant { + Instant::now() + } +} + +fn execute_on_main_thread(window: &web_sys::Window, item: 
MainThreadItem) { + match item { + MainThreadItem::Runnable(runnable) => { + if !runnable.metadata().is_closed() { + runnable.run(); + } + } + MainThreadItem::Delayed { runnable, millis } => { + let callback = Closure::once_into_js(move || { + if !runnable.metadata().is_closed() { + runnable.run(); + } + }); + window + .set_timeout_with_callback_and_timeout_and_arguments_0( + callback.unchecked_ref(), + millis, + ) + .ok(); + } + MainThreadItem::RealtimeFunction(function) => { + function(); + } + } +} + +fn schedule_runnable(window: &web_sys::Window, runnable: RunnableVariant, priority: Priority) { + let callback = Closure::once_into_js(move || { + if !runnable.metadata().is_closed() { + runnable.run(); + } + }); + let callback: &js_sys::Function = callback.unchecked_ref(); + + match priority { + Priority::RealtimeAudio => { + window.queue_microtask(callback); + } + _ => { + // TODO-Wasm: this ought to enqueue so we can dequeue with proper priority + window + .set_timeout_with_callback_and_timeout_and_arguments_0(callback, 0) + .ok(); + } + } +} diff --git a/crates/gpui_web/src/display.rs b/crates/gpui_web/src/display.rs new file mode 100644 index 0000000000000000000000000000000000000000..77dd35d92367ccc3439536db8f9bbb5ed079e7a1 --- /dev/null +++ b/crates/gpui_web/src/display.rs @@ -0,0 +1,98 @@ +use anyhow::Result; +use gpui::{Bounds, DisplayId, Pixels, PlatformDisplay, Point, Size, px}; + +#[derive(Debug)] +pub struct WebDisplay { + id: DisplayId, + uuid: uuid::Uuid, + browser_window: web_sys::Window, +} + +// Safety: WASM is single-threaded — there is no concurrent access to `web_sys::Window`. 
+unsafe impl Send for WebDisplay {} +unsafe impl Sync for WebDisplay {} + +impl WebDisplay { + pub fn new(browser_window: web_sys::Window) -> Self { + WebDisplay { + id: DisplayId::new(1), + uuid: uuid::Uuid::new_v4(), + browser_window, + } + } + + fn screen_size(&self) -> Size { + let Some(screen) = self.browser_window.screen().ok() else { + return Size { + width: px(1920.), + height: px(1080.), + }; + }; + + let width = screen.width().unwrap_or(1920) as f32; + let height = screen.height().unwrap_or(1080) as f32; + + Size { + width: px(width), + height: px(height), + } + } + + fn viewport_size(&self) -> Size { + let width = self + .browser_window + .inner_width() + .ok() + .and_then(|v| v.as_f64()) + .unwrap_or(1920.0) as f32; + let height = self + .browser_window + .inner_height() + .ok() + .and_then(|v| v.as_f64()) + .unwrap_or(1080.0) as f32; + + Size { + width: px(width), + height: px(height), + } + } +} + +impl PlatformDisplay for WebDisplay { + fn id(&self) -> DisplayId { + self.id + } + + fn uuid(&self) -> Result { + Ok(self.uuid) + } + + fn bounds(&self) -> Bounds { + let size = self.screen_size(); + Bounds { + origin: Point::default(), + size, + } + } + + fn visible_bounds(&self) -> Bounds { + let size = self.viewport_size(); + Bounds { + origin: Point::default(), + size, + } + } + + fn default_bounds(&self) -> Bounds { + let visible = self.visible_bounds(); + let width = visible.size.width * 0.75; + let height = visible.size.height * 0.75; + let origin_x = (visible.size.width - width) / 2.0; + let origin_y = (visible.size.height - height) / 2.0; + Bounds { + origin: Point::new(origin_x, origin_y), + size: Size { width, height }, + } + } +} diff --git a/crates/gpui_web/src/events.rs b/crates/gpui_web/src/events.rs new file mode 100644 index 0000000000000000000000000000000000000000..5f6d8527e70a3778a46a11e00758e822790e742f --- /dev/null +++ b/crates/gpui_web/src/events.rs @@ -0,0 +1,615 @@ +use std::rc::Rc; + +use gpui::{ + Capslock, ExternalPaths, 
FileDropEvent, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, + ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, + MouseUpEvent, NavigationDirection, Pixels, PlatformInput, Point, ScrollDelta, ScrollWheelEvent, + TouchPhase, point, px, +}; +use smallvec::smallvec; +use wasm_bindgen::prelude::*; + +use crate::window::WebWindowInner; + +pub struct WebEventListeners { + #[allow(dead_code)] + closures: Vec>, +} + +pub(crate) struct ClickState { + last_position: Point, + last_time: f64, + current_count: usize, +} + +impl Default for ClickState { + fn default() -> Self { + Self { + last_position: Point::default(), + last_time: 0.0, + current_count: 0, + } + } +} + +impl ClickState { + fn register_click(&mut self, position: Point, time: f64) -> usize { + let distance = ((f32::from(position.x) - f32::from(self.last_position.x)).powi(2) + + (f32::from(position.y) - f32::from(self.last_position.y)).powi(2)) + .sqrt(); + + if (time - self.last_time) < 400.0 && distance < 5.0 { + self.current_count += 1; + } else { + self.current_count = 1; + } + + self.last_position = position; + self.last_time = time; + self.current_count + } +} + +impl WebWindowInner { + pub fn register_event_listeners(self: &Rc) -> WebEventListeners { + let mut closures = vec![ + self.register_pointer_down(), + self.register_pointer_up(), + self.register_pointer_move(), + self.register_pointer_leave(), + self.register_wheel(), + self.register_context_menu(), + self.register_dragover(), + self.register_drop(), + self.register_dragleave(), + self.register_key_down(), + self.register_key_up(), + self.register_focus(), + self.register_blur(), + self.register_pointer_enter(), + self.register_pointer_leave_hover(), + ]; + closures.extend(self.register_visibility_change()); + closures.extend(self.register_appearance_change()); + + WebEventListeners { closures } + } + + fn listen( + self: &Rc, + event_name: &str, + handler: impl FnMut(JsValue) + 'static, + ) -> Closure { + let 
closure = Closure::::new(handler); + self.canvas + .add_event_listener_with_callback(event_name, closure.as_ref().unchecked_ref()) + .ok(); + closure + } + + /// Registers a listener with `{passive: false}` so that `preventDefault()` works. + /// Needed for events like `wheel` which are passive by default in modern browsers. + fn listen_non_passive( + self: &Rc, + event_name: &str, + handler: impl FnMut(JsValue) + 'static, + ) -> Closure { + let closure = Closure::::new(handler); + let canvas_js: &JsValue = self.canvas.as_ref(); + let callback_js: &JsValue = closure.as_ref(); + let options = js_sys::Object::new(); + js_sys::Reflect::set(&options, &"passive".into(), &false.into()).ok(); + if let Ok(add_fn_val) = js_sys::Reflect::get(canvas_js, &"addEventListener".into()) { + if let Ok(add_fn) = add_fn_val.dyn_into::() { + add_fn + .call3(canvas_js, &event_name.into(), callback_js, &options) + .ok(); + } + } + closure + } + + fn dispatch_input(&self, input: PlatformInput) { + let mut borrowed = self.callbacks.borrow_mut(); + if let Some(ref mut callback) = borrowed.input { + callback(input); + } + } + + fn register_pointer_down(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointerdown", move |event: JsValue| { + let event: web_sys::PointerEvent = event.unchecked_into(); + event.prevent_default(); + this.canvas.focus().ok(); + + let button = dom_mouse_button_to_gpui(event.button()); + let position = pointer_position_in_element(&event); + let modifiers = modifiers_from_mouse_event(&event, this.is_mac); + let time = js_sys::Date::now(); + + this.pressed_button.set(Some(button)); + let click_count = this.click_state.borrow_mut().register_click(position, time); + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + current_state.modifiers = modifiers; + } + + this.dispatch_input(PlatformInput::MouseDown(MouseDownEvent { + button, + position, + modifiers, + click_count, + first_mouse: false, + })); + }) 
+ } + + fn register_pointer_up(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointerup", move |event: JsValue| { + let event: web_sys::PointerEvent = event.unchecked_into(); + event.prevent_default(); + + let button = dom_mouse_button_to_gpui(event.button()); + let position = pointer_position_in_element(&event); + let modifiers = modifiers_from_mouse_event(&event, this.is_mac); + + this.pressed_button.set(None); + let click_count = this.click_state.borrow().current_count; + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + current_state.modifiers = modifiers; + } + + this.dispatch_input(PlatformInput::MouseUp(MouseUpEvent { + button, + position, + modifiers, + click_count, + })); + }) + } + + fn register_pointer_move(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointermove", move |event: JsValue| { + let event: web_sys::PointerEvent = event.unchecked_into(); + event.prevent_default(); + + let position = pointer_position_in_element(&event); + let modifiers = modifiers_from_mouse_event(&event, this.is_mac); + let current_pressed = this.pressed_button.get(); + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + current_state.modifiers = modifiers; + } + + this.dispatch_input(PlatformInput::MouseMove(MouseMoveEvent { + position, + pressed_button: current_pressed, + modifiers, + })); + }) + } + + fn register_pointer_leave(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointerleave", move |event: JsValue| { + let event: web_sys::PointerEvent = event.unchecked_into(); + + let position = pointer_position_in_element(&event); + let modifiers = modifiers_from_mouse_event(&event, this.is_mac); + let current_pressed = this.pressed_button.get(); + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + current_state.modifiers = modifiers; + } + + 
this.dispatch_input(PlatformInput::MouseExited(MouseExitEvent { + position, + pressed_button: current_pressed, + modifiers, + })); + }) + } + + fn register_wheel(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen_non_passive("wheel", move |event: JsValue| { + let event: web_sys::WheelEvent = event.unchecked_into(); + event.prevent_default(); + + let mouse_event: &web_sys::MouseEvent = event.as_ref(); + let position = mouse_position_in_element(mouse_event); + let modifiers = modifiers_from_wheel_event(mouse_event, this.is_mac); + + let delta_mode = event.delta_mode(); + let delta = if delta_mode == 1 { + ScrollDelta::Lines(point(-event.delta_x() as f32, -event.delta_y() as f32)) + } else { + ScrollDelta::Pixels(point( + px(-event.delta_x() as f32), + px(-event.delta_y() as f32), + )) + }; + + { + let mut current_state = this.state.borrow_mut(); + current_state.modifiers = modifiers; + } + + this.dispatch_input(PlatformInput::ScrollWheel(ScrollWheelEvent { + position, + delta, + modifiers, + touch_phase: TouchPhase::Moved, + })); + }) + } + + fn register_context_menu(self: &Rc) -> Closure { + self.listen("contextmenu", move |event: JsValue| { + let event: web_sys::Event = event.unchecked_into(); + event.prevent_default(); + }) + } + + fn register_dragover(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("dragover", move |event: JsValue| { + let event: web_sys::DragEvent = event.unchecked_into(); + event.prevent_default(); + + let mouse_event: &web_sys::MouseEvent = event.as_ref(); + let position = mouse_position_in_element(mouse_event); + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + } + + this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Pending { position })); + }) + } + + fn register_drop(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("drop", move |event: JsValue| { + let event: web_sys::DragEvent = event.unchecked_into(); + 
event.prevent_default(); + + let mouse_event: &web_sys::MouseEvent = event.as_ref(); + let position = mouse_position_in_element(mouse_event); + + { + let mut current_state = this.state.borrow_mut(); + current_state.mouse_position = position; + } + + let paths = extract_file_paths_from_drag(&event); + + this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Entered { + position, + paths: ExternalPaths(paths), + })); + + this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Submit { position })); + }) + } + + fn register_dragleave(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("dragleave", move |_event: JsValue| { + this.dispatch_input(PlatformInput::FileDrop(FileDropEvent::Exited)); + }) + } + + fn register_key_down(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("keydown", move |event: JsValue| { + let event: web_sys::KeyboardEvent = event.unchecked_into(); + + let modifiers = modifiers_from_keyboard_event(&event, this.is_mac); + let capslock = capslock_from_keyboard_event(&event); + + { + let mut current_state = this.state.borrow_mut(); + current_state.modifiers = modifiers; + current_state.capslock = capslock; + } + + this.dispatch_input(PlatformInput::ModifiersChanged(ModifiersChangedEvent { + modifiers, + capslock, + })); + + let key = dom_key_to_gpui_key(&event); + + if is_modifier_only_key(&key) { + return; + } + + event.prevent_default(); + + let is_held = event.repeat(); + let key_char = compute_key_char(&event, &key, &modifiers); + + let keystroke = Keystroke { + modifiers, + key, + key_char, + }; + + this.dispatch_input(PlatformInput::KeyDown(KeyDownEvent { + keystroke, + is_held, + prefer_character_input: false, + })); + }) + } + + fn register_key_up(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("keyup", move |event: JsValue| { + let event: web_sys::KeyboardEvent = event.unchecked_into(); + + let modifiers = modifiers_from_keyboard_event(&event, this.is_mac); + let capslock = 
capslock_from_keyboard_event(&event); + + { + let mut current_state = this.state.borrow_mut(); + current_state.modifiers = modifiers; + current_state.capslock = capslock; + } + + this.dispatch_input(PlatformInput::ModifiersChanged(ModifiersChangedEvent { + modifiers, + capslock, + })); + + let key = dom_key_to_gpui_key(&event); + + if is_modifier_only_key(&key) { + return; + } + + event.prevent_default(); + + let key_char = compute_key_char(&event, &key, &modifiers); + + let keystroke = Keystroke { + modifiers, + key, + key_char, + }; + + this.dispatch_input(PlatformInput::KeyUp(KeyUpEvent { keystroke })); + }) + } + + fn register_focus(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("focus", move |_event: JsValue| { + { + let mut state = this.state.borrow_mut(); + state.is_active = true; + } + let mut callbacks = this.callbacks.borrow_mut(); + if let Some(ref mut callback) = callbacks.active_status_change { + callback(true); + } + }) + } + + fn register_blur(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("blur", move |_event: JsValue| { + { + let mut state = this.state.borrow_mut(); + state.is_active = false; + } + let mut callbacks = this.callbacks.borrow_mut(); + if let Some(ref mut callback) = callbacks.active_status_change { + callback(false); + } + }) + } + + fn register_pointer_enter(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointerenter", move |_event: JsValue| { + { + let mut state = this.state.borrow_mut(); + state.is_hovered = true; + } + let mut callbacks = this.callbacks.borrow_mut(); + if let Some(ref mut callback) = callbacks.hover_status_change { + callback(true); + } + }) + } + + fn register_pointer_leave_hover(self: &Rc) -> Closure { + let this = Rc::clone(self); + self.listen("pointerleave", move |_event: JsValue| { + { + let mut state = this.state.borrow_mut(); + state.is_hovered = false; + } + let mut callbacks = this.callbacks.borrow_mut(); + if let Some(ref mut callback) = 
callbacks.hover_status_change { + callback(false); + } + }) + } +} + +fn dom_key_to_gpui_key(event: &web_sys::KeyboardEvent) -> String { + let key = event.key(); + match key.as_str() { + "Enter" => "enter".to_string(), + "Backspace" => "backspace".to_string(), + "Tab" => "tab".to_string(), + "Escape" => "escape".to_string(), + "Delete" => "delete".to_string(), + " " => "space".to_string(), + "ArrowLeft" => "left".to_string(), + "ArrowRight" => "right".to_string(), + "ArrowUp" => "up".to_string(), + "ArrowDown" => "down".to_string(), + "Home" => "home".to_string(), + "End" => "end".to_string(), + "PageUp" => "pageup".to_string(), + "PageDown" => "pagedown".to_string(), + "Insert" => "insert".to_string(), + "Control" => "control".to_string(), + "Alt" => "alt".to_string(), + "Shift" => "shift".to_string(), + "Meta" => "platform".to_string(), + "CapsLock" => "capslock".to_string(), + other => { + if let Some(rest) = other.strip_prefix('F') { + if let Ok(number) = rest.parse::() { + if (1..=35).contains(&number) { + return format!("f{number}"); + } + } + } + other.to_lowercase() + } + } +} + +fn dom_mouse_button_to_gpui(button: i16) -> MouseButton { + match button { + 0 => MouseButton::Left, + 1 => MouseButton::Middle, + 2 => MouseButton::Right, + 3 => MouseButton::Navigate(NavigationDirection::Back), + 4 => MouseButton::Navigate(NavigationDirection::Forward), + _ => MouseButton::Left, + } +} + +fn modifiers_from_keyboard_event(event: &web_sys::KeyboardEvent, _is_mac: bool) -> Modifiers { + Modifiers { + control: event.ctrl_key(), + alt: event.alt_key(), + shift: event.shift_key(), + platform: event.meta_key(), + function: false, + } +} + +fn modifiers_from_mouse_event(event: &web_sys::PointerEvent, _is_mac: bool) -> Modifiers { + let mouse_event: &web_sys::MouseEvent = event.as_ref(); + Modifiers { + control: mouse_event.ctrl_key(), + alt: mouse_event.alt_key(), + shift: mouse_event.shift_key(), + platform: mouse_event.meta_key(), + function: false, + } +} + +fn 
modifiers_from_wheel_event(event: &web_sys::MouseEvent, _is_mac: bool) -> Modifiers {
    Modifiers {
        control: event.ctrl_key(),
        alt: event.alt_key(),
        shift: event.shift_key(),
        platform: event.meta_key(),
        function: false,
    }
}

/// Reads the current CapsLock lock state from a keyboard event.
fn capslock_from_keyboard_event(event: &web_sys::KeyboardEvent) -> Capslock {
    Capslock {
        on: event.get_modifier_state("CapsLock"),
    }
}

/// Best-effort detection of a macOS host, used to decide how browser
/// modifier keys map onto the gpui `platform` modifier.
pub(crate) fn is_mac_platform(browser_window: &web_sys::Window) -> bool {
    let navigator = browser_window.navigator();

    #[allow(deprecated)]
    // navigator.platform() is deprecated but navigator.userAgentData is not widely available yet
    if let Ok(platform) = navigator.platform() {
        if platform.contains("Mac") {
            return true;
        }
    }

    if let Ok(user_agent) = navigator.user_agent() {
        return user_agent.contains("Mac");
    }

    false
}

/// True for keys that are modifiers themselves; those never produce a
/// `key_char` and are reported via `ModifiersChanged` instead of key events.
fn is_modifier_only_key(key: &str) -> bool {
    matches!(key, "control" | "alt" | "shift" | "platform" | "capslock")
}

/// Computes the text this keystroke would insert, if any.
///
/// Returns `None` when a chord modifier (platform/control) is held, when the
/// key is itself a modifier, or when the DOM `key` value is a multi-character
/// identifier such as "Enter" rather than printable text.
fn compute_key_char(
    event: &web_sys::KeyboardEvent,
    gpui_key: &str,
    modifiers: &Modifiers,
) -> Option<String> {
    if modifiers.platform || modifiers.control {
        return None;
    }

    if is_modifier_only_key(gpui_key) {
        return None;
    }

    if gpui_key == "space" {
        return Some(" ".to_string());
    }

    let raw_key = event.key();

    // A single *character* means the key produces text. Count chars rather
    // than bytes so non-ASCII printable keys (e.g. "é", 2 bytes in UTF-8)
    // are not silently dropped.
    if raw_key.chars().count() == 1 {
        return Some(raw_key);
    }

    None
}

fn pointer_position_in_element(event: &web_sys::PointerEvent) -> Point<Pixels> {
    let mouse_event: &web_sys::MouseEvent = event.as_ref();
    mouse_position_in_element(mouse_event)
}

fn mouse_position_in_element(event: &web_sys::MouseEvent) -> Point<Pixels> {
    // offset_x/offset_y give position relative to the target element's padding edge
    point(px(event.offset_x() as f32), px(event.offset_y() as f32))
}

/// Collects the files carried by a drag event. Browsers do not expose real
/// filesystem paths, so each entry is built from the file's *name* only.
fn extract_file_paths_from_drag(
    event: &web_sys::DragEvent,
) -> smallvec::SmallVec<[std::path::PathBuf; 2]> {
    let mut paths = smallvec![];
    let Some(data_transfer) = event.data_transfer() else {
        return paths;
    };
    let 
file_list = data_transfer.files(); + let Some(files) = file_list else { + return paths; + }; + for index in 0..files.length() { + if let Some(file) = files.get(index) { + paths.push(std::path::PathBuf::from(file.name())); + } + } + paths +} diff --git a/crates/gpui_web/src/gpui_web.rs b/crates/gpui_web/src/gpui_web.rs new file mode 100644 index 0000000000000000000000000000000000000000..9cd773823bd9b65ef99cb89c12184919a4c45dc2 --- /dev/null +++ b/crates/gpui_web/src/gpui_web.rs @@ -0,0 +1,18 @@ +#![cfg(target_family = "wasm")] + +mod dispatcher; +mod display; +mod events; +mod http_client; +mod keyboard; +mod logging; +mod platform; +mod window; + +pub use dispatcher::WebDispatcher; +pub use display::WebDisplay; +pub use http_client::FetchHttpClient; +pub use keyboard::WebKeyboardLayout; +pub use logging::init_logging; +pub use platform::WebPlatform; +pub use window::WebWindow; diff --git a/crates/gpui_web/src/http_client.rs b/crates/gpui_web/src/http_client.rs new file mode 100644 index 0000000000000000000000000000000000000000..14d58cf45766885af76f49892589f70b89fb8116 --- /dev/null +++ b/crates/gpui_web/src/http_client.rs @@ -0,0 +1,199 @@ +use anyhow::anyhow; +use futures::AsyncReadExt as _; +use http_client::{AsyncBody, HttpClient, RedirectPolicy}; +use std::future::Future; +use std::pin::Pin; +use std::task::Poll; +use wasm_bindgen::JsCast as _; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(catch, js_name = "fetch")] + fn global_fetch(input: &web_sys::Request) -> Result; +} + +pub struct FetchHttpClient { + user_agent: Option, +} + +impl Default for FetchHttpClient { + fn default() -> Self { + Self { user_agent: None } + } +} + +#[cfg(feature = "multithreaded")] +impl FetchHttpClient { + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. 
+ pub unsafe fn new() -> Self { + Self::default() + } + + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. + pub unsafe fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +#[cfg(not(feature = "multithreaded"))] +impl FetchHttpClient { + pub fn new() -> Self { + Self::default() + } + + pub fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +/// Wraps a `!Send` future to satisfy the `Send` bound on `BoxFuture`. +/// +/// Safety: only valid in WASM contexts where the `FetchHttpClient` is +/// confined to a single thread (guaranteed by the caller via unsafe +/// constructors when `multithreaded` is enabled, or by the absence of +/// threads when it is not). +struct AssertSend(F); + +unsafe impl Send for AssertSend {} + +impl Future for AssertSend { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll { + // Safety: pin projection for a single-field newtype wrapper. 
+ let inner = unsafe { self.map_unchecked_mut(|this| &mut this.0) }; + inner.poll(cx) + } +} + +impl HttpClient for FetchHttpClient { + fn user_agent(&self) -> Option<&http_client::http::header::HeaderValue> { + self.user_agent.as_ref() + } + + fn proxy(&self) -> Option<&http_client::Url> { + None + } + + fn send( + &self, + req: http_client::http::Request, + ) -> futures::future::BoxFuture<'static, anyhow::Result>> + { + let (parts, body) = req.into_parts(); + + Box::pin(AssertSend(async move { + let body_bytes = read_body_to_bytes(body).await?; + + let init = web_sys::RequestInit::new(); + init.set_method(parts.method.as_str()); + + if let Some(redirect_policy) = parts.extensions.get::() { + match redirect_policy { + RedirectPolicy::NoFollow => { + init.set_redirect(web_sys::RequestRedirect::Manual); + } + RedirectPolicy::FollowLimit(_) | RedirectPolicy::FollowAll => { + init.set_redirect(web_sys::RequestRedirect::Follow); + } + } + } + + if let Some(ref bytes) = body_bytes { + let uint8array = js_sys::Uint8Array::from(bytes.as_slice()); + init.set_body(uint8array.as_ref()); + } + + let url = parts.uri.to_string(); + let request = web_sys::Request::new_with_str_and_init(&url, &init) + .map_err(|error| anyhow!("failed to create fetch Request: {error:?}"))?; + + let request_headers = request.headers(); + for (name, value) in &parts.headers { + let value_str = value + .to_str() + .map_err(|_| anyhow!("non-ASCII header value for {name}"))?; + request_headers + .set(name.as_str(), value_str) + .map_err(|error| anyhow!("failed to set header {name}: {error:?}"))?; + } + + let promise = global_fetch(&request) + .map_err(|error| anyhow!("fetch threw an error: {error:?}"))?; + let response_value = wasm_bindgen_futures::JsFuture::from(promise) + .await + .map_err(|error| anyhow!("fetch failed: {error:?}"))?; + + let web_response: web_sys::Response = response_value + .dyn_into() + .map_err(|error| anyhow!("fetch result is not a Response: {error:?}"))?; + + let status = 
web_response.status();
            let mut builder = http_client::http::Response::builder().status(status);

            // `Headers` is a JS iterable yielding `[name, value]` pairs.
            // `js_sys::Array::from` calls `Array.from()` which accepts any iterable.
            let header_pairs = js_sys::Array::from(&web_response.headers());
            for index in 0..header_pairs.length() {
                // Each entry should itself be a two-element array; tolerate
                // (and log) anything that is not, rather than failing the
                // whole response.
                match header_pairs.get(index).dyn_into::<js_sys::Array>() {
                    Ok(pair) => match (pair.get(0).as_string(), pair.get(1).as_string()) {
                        (Some(name), Some(value)) => {
                            builder = builder.header(name, value);
                        }
                        (name, value) => {
                            log::warn!(
                                "skipping response header at index {index}: \
                                 name={name:?}, value={value:?}"
                            );
                        }
                    },
                    Err(entry) => {
                        log::warn!("skipping non-array header entry at index {index}: {entry:?}");
                    }
                }
            }

            // The entire response body is eagerly buffered into memory via
            // `arrayBuffer()`. The Fetch API does not expose a synchronous
            // streaming interface; streaming would require `ReadableStream`
            // interop which is significantly more complex.
+ let body_promise = web_response + .array_buffer() + .map_err(|error| anyhow!("failed to initiate response body read: {error:?}"))?; + let body_value = wasm_bindgen_futures::JsFuture::from(body_promise) + .await + .map_err(|error| anyhow!("failed to read response body: {error:?}"))?; + let array_buffer: js_sys::ArrayBuffer = body_value + .dyn_into() + .map_err(|error| anyhow!("response body is not an ArrayBuffer: {error:?}"))?; + let response_bytes = js_sys::Uint8Array::new(&array_buffer).to_vec(); + + builder + .body(AsyncBody::from(response_bytes)) + .map_err(|error| anyhow!(error)) + })) + } +} + +async fn read_body_to_bytes(mut body: AsyncBody) -> anyhow::Result>> { + let mut buffer = Vec::new(); + body.read_to_end(&mut buffer).await?; + if buffer.is_empty() { + Ok(None) + } else { + Ok(Some(buffer)) + } +} diff --git a/crates/gpui_web/src/keyboard.rs b/crates/gpui_web/src/keyboard.rs new file mode 100644 index 0000000000000000000000000000000000000000..3c1c97a01ee784af1687bfe90bedc97b091c5fba --- /dev/null +++ b/crates/gpui_web/src/keyboard.rs @@ -0,0 +1,19 @@ +use gpui::PlatformKeyboardLayout; + +pub struct WebKeyboardLayout; + +impl WebKeyboardLayout { + pub fn new() -> Self { + WebKeyboardLayout + } +} + +impl PlatformKeyboardLayout for WebKeyboardLayout { + fn id(&self) -> &str { + "us" + } + + fn name(&self) -> &str { + "US" + } +} diff --git a/crates/gpui_web/src/logging.rs b/crates/gpui_web/src/logging.rs new file mode 100644 index 0000000000000000000000000000000000000000..9e76201b194815a01d558c8099f552401af0fcea --- /dev/null +++ b/crates/gpui_web/src/logging.rs @@ -0,0 +1,37 @@ +use log::{Level, Log, Metadata, Record}; + +struct ConsoleLogger; + +impl Log for ConsoleLogger { + fn enabled(&self, _metadata: &Metadata) -> bool { + true + } + + fn log(&self, record: &Record) { + if !self.enabled(record.metadata()) { + return; + } + + let message = format!( + "[{}] {}: {}", + record.level(), + record.target(), + record.args() + ); + let js_string = 
wasm_bindgen::JsValue::from_str(&message); + + match record.level() { + Level::Error => web_sys::console::error_1(&js_string), + Level::Warn => web_sys::console::warn_1(&js_string), + Level::Info => web_sys::console::info_1(&js_string), + Level::Debug | Level::Trace => web_sys::console::log_1(&js_string), + } + } + + fn flush(&self) {} +} + +pub fn init_logging() { + log::set_logger(&ConsoleLogger).ok(); + log::set_max_level(log::LevelFilter::Info); +} diff --git a/crates/gpui_web/src/platform.rs b/crates/gpui_web/src/platform.rs new file mode 100644 index 0000000000000000000000000000000000000000..4d78b71aa05b743f779d0e8a1e7ed8a5eac136f9 --- /dev/null +++ b/crates/gpui_web/src/platform.rs @@ -0,0 +1,344 @@ +use crate::dispatcher::WebDispatcher; +use crate::display::WebDisplay; +use crate::keyboard::WebKeyboardLayout; +use crate::window::WebWindow; +use anyhow::Result; +use futures::channel::oneshot; +use gpui::{ + Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DummyKeyboardMapper, + ForegroundExecutor, Keymap, Menu, MenuItem, PathPromptOptions, Platform, PlatformDisplay, + PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, PlatformWindow, Task, + ThermalState, WindowAppearance, WindowParams, +}; +use gpui_wgpu::WgpuContext; +use std::{ + borrow::Cow, + cell::RefCell, + path::{Path, PathBuf}, + rc::Rc, + sync::Arc, +}; + +static BUNDLED_FONTS: &[&[u8]] = &[ + include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf"), + include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Italic.ttf"), + include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-SemiBold.ttf"), + include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-SemiBoldItalic.ttf"), + include_bytes!("../../../assets/fonts/lilex/Lilex-Regular.ttf"), + include_bytes!("../../../assets/fonts/lilex/Lilex-Bold.ttf"), + include_bytes!("../../../assets/fonts/lilex/Lilex-Italic.ttf"), + 
include_bytes!("../../../assets/fonts/lilex/Lilex-BoldItalic.ttf"), +]; + +pub struct WebPlatform { + browser_window: web_sys::Window, + background_executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + text_system: Arc, + active_window: RefCell>, + active_display: Rc, + callbacks: RefCell, + wgpu_context: Rc>>, +} + +#[derive(Default)] +struct WebPlatformCallbacks { + open_urls: Option)>>, + quit: Option>, + reopen: Option>, + app_menu_action: Option>, + will_open_app_menu: Option>, + validate_app_menu_command: Option bool>>, + keyboard_layout_change: Option>, + thermal_state_change: Option>, +} + +impl WebPlatform { + pub fn new(allow_multi_threading: bool) -> Self { + let browser_window = + web_sys::window().expect("must be running in a browser window context"); + let dispatcher = Arc::new(WebDispatcher::new( + browser_window.clone(), + allow_multi_threading, + )); + let background_executor = BackgroundExecutor::new(dispatcher.clone()); + let foreground_executor = ForegroundExecutor::new(dispatcher); + let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new_without_system_fonts( + "IBM Plex Sans", + )); + let fonts = BUNDLED_FONTS + .iter() + .map(|bytes| Cow::Borrowed(*bytes)) + .collect(); + if let Err(error) = text_system.add_fonts(fonts) { + log::error!("failed to load bundled fonts: {error:#}"); + } + let text_system: Arc = text_system; + let active_display: Rc = + Rc::new(WebDisplay::new(browser_window.clone())); + + Self { + browser_window, + background_executor, + foreground_executor, + text_system, + active_window: RefCell::new(None), + active_display, + callbacks: RefCell::new(WebPlatformCallbacks::default()), + wgpu_context: Rc::new(RefCell::new(None)), + } + } +} + +impl Platform for WebPlatform { + fn background_executor(&self) -> BackgroundExecutor { + self.background_executor.clone() + } + + fn foreground_executor(&self) -> ForegroundExecutor { + self.foreground_executor.clone() + } + + fn text_system(&self) -> Arc { + 
self.text_system.clone() + } + + fn run(&self, on_finish_launching: Box) { + let wgpu_context = self.wgpu_context.clone(); + wasm_bindgen_futures::spawn_local(async move { + match WgpuContext::new_web().await { + Ok(context) => { + log::info!("WebGPU context initialized successfully"); + *wgpu_context.borrow_mut() = Some(context); + on_finish_launching(); + } + Err(err) => { + log::error!("Failed to initialize WebGPU context: {err:#}"); + on_finish_launching(); + } + } + }); + } + + fn quit(&self) { + log::warn!("WebPlatform::quit called, but quitting is not supported in the browser ."); + } + + fn restart(&self, _binary_path: Option) {} + + fn activate(&self, _ignoring_other_apps: bool) {} + + fn hide(&self) {} + + fn hide_other_apps(&self) {} + + fn unhide_other_apps(&self) {} + + fn displays(&self) -> Vec> { + vec![self.active_display.clone()] + } + + fn primary_display(&self) -> Option> { + Some(self.active_display.clone()) + } + + fn active_window(&self) -> Option { + *self.active_window.borrow() + } + + fn open_window( + &self, + handle: AnyWindowHandle, + params: WindowParams, + ) -> anyhow::Result> { + let context_ref = self.wgpu_context.borrow(); + let context = context_ref.as_ref().ok_or_else(|| { + anyhow::anyhow!("WebGPU context not initialized. 
Was Platform::run() called?") + })?; + + let window = WebWindow::new(handle, params, context, self.browser_window.clone())?; + *self.active_window.borrow_mut() = Some(handle); + Ok(Box::new(window)) + } + + fn window_appearance(&self) -> WindowAppearance { + let Ok(Some(media_query)) = self + .browser_window + .match_media("(prefers-color-scheme: dark)") + else { + return WindowAppearance::Light; + }; + if media_query.matches() { + WindowAppearance::Dark + } else { + WindowAppearance::Light + } + } + + fn open_url(&self, url: &str) { + if let Err(error) = self.browser_window.open_with_url(url) { + log::warn!("Failed to open URL '{url}': {error:?}"); + } + } + + fn on_open_urls(&self, callback: Box)>) { + self.callbacks.borrow_mut().open_urls = Some(callback); + } + + fn register_url_scheme(&self, _url: &str) -> Task> { + Task::ready(Ok(())) + } + + fn prompt_for_paths( + &self, + _options: PathPromptOptions, + ) -> oneshot::Receiver>>> { + let (tx, rx) = oneshot::channel(); + tx.send(Err(anyhow::anyhow!( + "prompt_for_paths is not supported on the web" + ))) + .ok(); + rx + } + + fn prompt_for_new_path( + &self, + _directory: &Path, + _suggested_name: Option<&str>, + ) -> oneshot::Receiver>> { + let (sender, receiver) = oneshot::channel(); + sender + .send(Err(anyhow::anyhow!( + "prompt_for_new_path is not supported on the web" + ))) + .ok(); + receiver + } + + fn can_select_mixed_files_and_dirs(&self) -> bool { + false + } + + fn reveal_path(&self, _path: &Path) {} + + fn open_with_system(&self, _path: &Path) {} + + fn on_quit(&self, callback: Box) { + self.callbacks.borrow_mut().quit = Some(callback); + } + + fn on_reopen(&self, callback: Box) { + self.callbacks.borrow_mut().reopen = Some(callback); + } + + fn set_menus(&self, _menus: Vec

, _keymap: &Keymap) {} + + fn set_dock_menu(&self, _menu: Vec, _keymap: &Keymap) {} + + fn on_app_menu_action(&self, callback: Box) { + self.callbacks.borrow_mut().app_menu_action = Some(callback); + } + + fn on_will_open_app_menu(&self, callback: Box) { + self.callbacks.borrow_mut().will_open_app_menu = Some(callback); + } + + fn on_validate_app_menu_command(&self, callback: Box bool>) { + self.callbacks.borrow_mut().validate_app_menu_command = Some(callback); + } + + fn thermal_state(&self) -> ThermalState { + ThermalState::Nominal + } + + fn on_thermal_state_change(&self, callback: Box) { + self.callbacks.borrow_mut().thermal_state_change = Some(callback); + } + + fn compositor_name(&self) -> &'static str { + "Web" + } + + fn app_path(&self) -> Result { + Err(anyhow::anyhow!("app_path is not available on the web")) + } + + fn path_for_auxiliary_executable(&self, _name: &str) -> Result { + Err(anyhow::anyhow!( + "path_for_auxiliary_executable is not available on the web" + )) + } + + fn set_cursor_style(&self, style: CursorStyle) { + let css_cursor = match style { + CursorStyle::Arrow => "default", + CursorStyle::IBeam => "text", + CursorStyle::Crosshair => "crosshair", + CursorStyle::ClosedHand => "grabbing", + CursorStyle::OpenHand => "grab", + CursorStyle::PointingHand => "pointer", + CursorStyle::ResizeLeft | CursorStyle::ResizeRight | CursorStyle::ResizeLeftRight => { + "ew-resize" + } + CursorStyle::ResizeUp | CursorStyle::ResizeDown | CursorStyle::ResizeUpDown => { + "ns-resize" + } + CursorStyle::ResizeUpLeftDownRight => "nesw-resize", + CursorStyle::ResizeUpRightDownLeft => "nwse-resize", + CursorStyle::ResizeColumn => "col-resize", + CursorStyle::ResizeRow => "row-resize", + CursorStyle::IBeamCursorForVerticalLayout => "vertical-text", + CursorStyle::OperationNotAllowed => "not-allowed", + CursorStyle::DragLink => "alias", + CursorStyle::DragCopy => "copy", + CursorStyle::ContextualMenu => "context-menu", + CursorStyle::None => "none", + }; + + if let 
Some(document) = self.browser_window.document() { + if let Some(body) = document.body() { + if let Err(error) = body.style().set_property("cursor", css_cursor) { + log::warn!("Failed to set cursor style: {error:?}"); + } + } + } + } + + fn should_auto_hide_scrollbars(&self) -> bool { + true + } + + fn read_from_clipboard(&self) -> Option { + None + } + + fn write_to_clipboard(&self, _item: ClipboardItem) {} + + fn write_credentials(&self, _url: &str, _username: &str, _password: &[u8]) -> Task> { + Task::ready(Err(anyhow::anyhow!( + "credential storage is not available on the web" + ))) + } + + fn read_credentials(&self, _url: &str) -> Task)>>> { + Task::ready(Ok(None)) + } + + fn delete_credentials(&self, _url: &str) -> Task> { + Task::ready(Err(anyhow::anyhow!( + "credential storage is not available on the web" + ))) + } + + fn keyboard_layout(&self) -> Box { + Box::new(WebKeyboardLayout) + } + + fn keyboard_mapper(&self) -> Rc { + Rc::new(DummyKeyboardMapper) + } + + fn on_keyboard_layout_change(&self, callback: Box) { + self.callbacks.borrow_mut().keyboard_layout_change = Some(callback); + } +} diff --git a/crates/gpui_web/src/window.rs b/crates/gpui_web/src/window.rs new file mode 100644 index 0000000000000000000000000000000000000000..ab6d6fc857dfd092ea7e3c5d2dcb46f9ddc96cfb --- /dev/null +++ b/crates/gpui_web/src/window.rs @@ -0,0 +1,702 @@ +use crate::display::WebDisplay; +use crate::events::{ClickState, WebEventListeners, is_mac_platform}; +use std::sync::Arc; +use std::{cell::Cell, cell::RefCell, rc::Rc}; + +use gpui::{ + AnyWindowHandle, Bounds, Capslock, Decorations, DevicePixels, DispatchEventResult, GpuSpecs, + Modifiers, MouseButton, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, + PlatformInputHandler, PlatformWindow, Point, PromptButton, PromptLevel, RequestFrameOptions, + ResizeEdge, Scene, Size, WindowAppearance, WindowBackgroundAppearance, WindowBounds, + WindowControlArea, WindowControls, WindowDecorations, WindowParams, px, +}; +use 
gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use wasm_bindgen::prelude::*; + +#[derive(Default)] +pub(crate) struct WebWindowCallbacks { + pub(crate) request_frame: Option>, + pub(crate) input: Option DispatchEventResult>>, + pub(crate) active_status_change: Option>, + pub(crate) hover_status_change: Option>, + pub(crate) resize: Option, f32)>>, + pub(crate) moved: Option>, + pub(crate) should_close: Option bool>>, + pub(crate) close: Option>, + pub(crate) appearance_changed: Option>, + pub(crate) hit_test_window_control: Option Option>>, +} + +pub(crate) struct WebWindowMutableState { + pub(crate) renderer: WgpuRenderer, + pub(crate) bounds: Bounds, + pub(crate) scale_factor: f32, + pub(crate) max_texture_dimension: u32, + pub(crate) title: String, + pub(crate) input_handler: Option, + pub(crate) is_fullscreen: bool, + pub(crate) is_active: bool, + pub(crate) is_hovered: bool, + pub(crate) mouse_position: Point, + pub(crate) modifiers: Modifiers, + pub(crate) capslock: Capslock, +} + +pub(crate) struct WebWindowInner { + pub(crate) browser_window: web_sys::Window, + pub(crate) canvas: web_sys::HtmlCanvasElement, + pub(crate) has_device_pixel_support: bool, + pub(crate) is_mac: bool, + pub(crate) state: RefCell, + pub(crate) callbacks: RefCell, + pub(crate) click_state: RefCell, + pub(crate) pressed_button: Cell>, + pub(crate) last_physical_size: Cell<(u32, u32)>, + pub(crate) notify_scale: Cell, + mql_handle: RefCell>, + pending_physical_size: Cell>, +} + +pub struct WebWindow { + inner: Rc, + display: Rc, + #[allow(dead_code)] + handle: AnyWindowHandle, + _raf_closure: Closure, + _resize_observer: Option, + _resize_observer_closure: Closure, + _event_listeners: WebEventListeners, +} + +impl WebWindow { + pub fn new( + handle: AnyWindowHandle, + _params: WindowParams, + context: &WgpuContext, + browser_window: web_sys::Window, + ) -> anyhow::Result { + let document = browser_window + .document() + .ok_or_else(|| anyhow::anyhow!("No `document` found 
on window"))?; + + let canvas: web_sys::HtmlCanvasElement = document + .create_element("canvas") + .map_err(|e| anyhow::anyhow!("Failed to create canvas element: {e:?}"))? + .dyn_into() + .map_err(|e| anyhow::anyhow!("Created element is not a canvas: {e:?}"))?; + + let dpr = browser_window.device_pixel_ratio() as f32; + let max_texture_dimension = context.device.limits().max_texture_dimension_2d; + let has_device_pixel_support = check_device_pixel_support(); + + canvas.set_tab_index(0); + + let style = canvas.style(); + style + .set_property("width", "100%") + .map_err(|e| anyhow::anyhow!("Failed to set canvas width style: {e:?}"))?; + style + .set_property("height", "100%") + .map_err(|e| anyhow::anyhow!("Failed to set canvas height style: {e:?}"))?; + style + .set_property("display", "block") + .map_err(|e| anyhow::anyhow!("Failed to set canvas display style: {e:?}"))?; + style + .set_property("outline", "none") + .map_err(|e| anyhow::anyhow!("Failed to set canvas outline style: {e:?}"))?; + style + .set_property("touch-action", "none") + .map_err(|e| anyhow::anyhow!("Failed to set touch-action style: {e:?}"))?; + + let body = document + .body() + .ok_or_else(|| anyhow::anyhow!("No `body` found on document"))?; + body.append_child(&canvas) + .map_err(|e| anyhow::anyhow!("Failed to append canvas to body: {e:?}"))?; + + canvas.focus().ok(); + + let device_size = Size { + width: DevicePixels(0), + height: DevicePixels(0), + }; + + let renderer_config = WgpuSurfaceConfig { + size: device_size, + transparent: false, + }; + + let renderer = WgpuRenderer::new_from_canvas(context, &canvas, renderer_config)?; + + let display: Rc = Rc::new(WebDisplay::new(browser_window.clone())); + + let initial_bounds = Bounds { + origin: Point::default(), + size: Size::default(), + }; + + let mutable_state = WebWindowMutableState { + renderer, + bounds: initial_bounds, + scale_factor: dpr, + max_texture_dimension, + title: String::new(), + input_handler: None, + is_fullscreen: false, + 
is_active: true, + is_hovered: false, + mouse_position: Point::default(), + modifiers: Modifiers::default(), + capslock: Capslock::default(), + }; + + let is_mac = is_mac_platform(&browser_window); + + let inner = Rc::new(WebWindowInner { + browser_window, + canvas, + has_device_pixel_support, + is_mac, + state: RefCell::new(mutable_state), + callbacks: RefCell::new(WebWindowCallbacks::default()), + click_state: RefCell::new(ClickState::default()), + pressed_button: Cell::new(None), + last_physical_size: Cell::new((0, 0)), + notify_scale: Cell::new(false), + mql_handle: RefCell::new(None), + pending_physical_size: Cell::new(None), + }); + + let raf_closure = inner.create_raf_closure(); + inner.schedule_raf(&raf_closure); + + let resize_observer_closure = Self::create_resize_observer_closure(Rc::clone(&inner)); + let resize_observer = + web_sys::ResizeObserver::new(resize_observer_closure.as_ref().unchecked_ref()).ok(); + + if let Some(ref observer) = resize_observer { + inner.observe_canvas(observer); + inner.watch_dpr_changes(observer); + } + + let event_listeners = inner.register_event_listeners(); + + Ok(Self { + inner, + display, + handle, + _raf_closure: raf_closure, + _resize_observer: resize_observer, + _resize_observer_closure: resize_observer_closure, + _event_listeners: event_listeners, + }) + } + + fn create_resize_observer_closure( + inner: Rc, + ) -> Closure { + Closure::new(move |entries: js_sys::Array| { + let entry: web_sys::ResizeObserverEntry = match entries.get(0).dyn_into().ok() { + Some(entry) => entry, + None => return, + }; + + let dpr = inner.browser_window.device_pixel_ratio(); + let dpr_f32 = dpr as f32; + + let (physical_width, physical_height, logical_width, logical_height) = + if inner.has_device_pixel_support { + let size: web_sys::ResizeObserverSize = entry + .device_pixel_content_box_size() + .get(0) + .unchecked_into(); + let pw = size.inline_size() as u32; + let ph = size.block_size() as u32; + let lw = pw as f64 / dpr; + let lh = 
ph as f64 / dpr; + (pw, ph, lw as f32, lh as f32) + } else { + // Safari fallback: use contentRect (always CSS px). + let rect = entry.content_rect(); + let lw = rect.width() as f32; + let lh = rect.height() as f32; + let pw = (lw as f64 * dpr).round() as u32; + let ph = (lh as f64 * dpr).round() as u32; + (pw, ph, lw, lh) + }; + + let scale_changed = inner.notify_scale.replace(false); + let prev = inner.last_physical_size.get(); + let size_changed = prev != (physical_width, physical_height); + + if !scale_changed && !size_changed { + return; + } + inner + .last_physical_size + .set((physical_width, physical_height)); + + // Skip rendering to a zero-size canvas (e.g. display:none). + if physical_width == 0 || physical_height == 0 { + let mut s = inner.state.borrow_mut(); + s.bounds.size = Size::default(); + s.scale_factor = dpr_f32; + // Still fire the callback so GPUI knows the window is gone. + drop(s); + let mut cbs = inner.callbacks.borrow_mut(); + if let Some(ref mut callback) = cbs.resize { + callback(Size::default(), dpr_f32); + } + return; + } + + let max_texture_dimension = inner.state.borrow().max_texture_dimension; + let clamped_width = physical_width.min(max_texture_dimension); + let clamped_height = physical_height.min(max_texture_dimension); + + inner + .pending_physical_size + .set(Some((clamped_width, clamped_height))); + + { + let mut s = inner.state.borrow_mut(); + s.bounds.size = Size { + width: px(logical_width), + height: px(logical_height), + }; + s.scale_factor = dpr_f32; + } + + let new_size = Size { + width: px(logical_width), + height: px(logical_height), + }; + + let mut cbs = inner.callbacks.borrow_mut(); + if let Some(ref mut callback) = cbs.resize { + callback(new_size, dpr_f32); + } + }) + } +} + +impl WebWindowInner { + fn create_raf_closure(self: &Rc) -> Closure { + let raf_handle: Rc>> = Rc::new(RefCell::new(None)); + let raf_handle_inner = Rc::clone(&raf_handle); + + let this = Rc::clone(self); + let closure = Closure::new(move || 
{ + { + let mut callbacks = this.callbacks.borrow_mut(); + if let Some(ref mut callback) = callbacks.request_frame { + callback(RequestFrameOptions { + require_presentation: true, + force_render: false, + }); + } + } + + // Re-schedule for the next frame + if let Some(ref func) = *raf_handle_inner.borrow() { + this.browser_window.request_animation_frame(func).ok(); + } + }); + + let js_func: js_sys::Function = + closure.as_ref().unchecked_ref::().clone(); + *raf_handle.borrow_mut() = Some(js_func); + + closure + } + + fn schedule_raf(&self, closure: &Closure) { + self.browser_window + .request_animation_frame(closure.as_ref().unchecked_ref()) + .ok(); + } + + fn observe_canvas(&self, observer: &web_sys::ResizeObserver) { + observer.unobserve(&self.canvas); + if self.has_device_pixel_support { + let options = web_sys::ResizeObserverOptions::new(); + options.set_box(web_sys::ResizeObserverBoxOptions::DevicePixelContentBox); + observer.observe_with_options(&self.canvas, &options); + } else { + observer.observe(&self.canvas); + } + } + + fn watch_dpr_changes(self: &Rc, observer: &web_sys::ResizeObserver) { + let current_dpr = self.browser_window.device_pixel_ratio(); + let media_query = + format!("(resolution: {current_dpr}dppx), (-webkit-device-pixel-ratio: {current_dpr})"); + let Some(mql) = self.browser_window.match_media(&media_query).ok().flatten() else { + return; + }; + + let this = Rc::clone(self); + let observer = observer.clone(); + + let closure = Closure::::new(move |_event: JsValue| { + this.notify_scale.set(true); + this.observe_canvas(&observer); + this.watch_dpr_changes(&observer); + }); + + mql.add_event_listener_with_callback("change", closure.as_ref().unchecked_ref()) + .ok(); + + *self.mql_handle.borrow_mut() = Some(MqlHandle { + mql, + _closure: closure, + }); + } + + pub(crate) fn register_visibility_change( + self: &Rc, + ) -> Option> { + let document = self.browser_window.document()?; + let this = Rc::clone(self); + + let closure = 
Closure::<dyn FnMut(JsValue)>::new(move |_event: JsValue| {
            let is_visible = this
                .browser_window
                .document()
                .map(|doc| {
                    // NOTE(review): read via Reflect rather than a typed
                    // web_sys accessor — presumably to avoid a feature
                    // flag/enum dependency; confirm.
                    let state_str: String = js_sys::Reflect::get(&doc, &"visibilityState".into())
                        .ok()
                        .and_then(|v| v.as_string())
                        .unwrap_or_default();
                    state_str == "visible"
                })
                // If the document is unavailable, assume the page is visible.
                .unwrap_or(true);

            {
                let mut state = this.state.borrow_mut();
                state.is_active = is_visible;
            }
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.active_status_change {
                callback(is_visible);
            }
        });

        document
            .add_event_listener_with_callback("visibilitychange", closure.as_ref().unchecked_ref())
            .ok();

        Some(closure)
    }

    /// Listens for `prefers-color-scheme` changes and forwards them to the
    /// window's `appearance_changed` callback. Returns the closure so the
    /// caller can keep it alive for the lifetime of the listener.
    pub(crate) fn register_appearance_change(
        self: &Rc<Self>,
    ) -> Option<Closure<dyn FnMut(JsValue)>> {
        let mql = self
            .browser_window
            .match_media("(prefers-color-scheme: dark)")
            .ok()??;

        let this = Rc::clone(self);
        let closure = Closure::<dyn FnMut(JsValue)>::new(move |_event: JsValue| {
            let mut callbacks = this.callbacks.borrow_mut();
            if let Some(ref mut callback) = callbacks.appearance_changed {
                callback();
            }
        });

        mql.add_event_listener_with_callback("change", closure.as_ref().unchecked_ref())
            .ok();

        Some(closure)
    }
}

/// Maps the browser's `prefers-color-scheme` media query onto gpui's
/// light/dark window appearance. Defaults to light when the query fails.
fn current_appearance(browser_window: &web_sys::Window) -> WindowAppearance {
    let is_dark = browser_window
        .match_media("(prefers-color-scheme: dark)")
        .ok()
        .flatten()
        .map(|mql| mql.matches())
        .unwrap_or(false);

    if is_dark {
        WindowAppearance::Dark
    } else {
        WindowAppearance::Light
    }
}

/// Keeps a MediaQueryList "change" listener alive, and detaches it on drop.
struct MqlHandle {
    mql: web_sys::MediaQueryList,
    _closure: Closure<dyn FnMut(JsValue)>,
}

impl Drop for MqlHandle {
    fn drop(&mut self) {
        self.mql
            .remove_event_listener_with_callback("change", self._closure.as_ref().unchecked_ref())
            .ok();
    }
}

// Safari does not support `devicePixelContentBoxSize`, so detect whether it's available.
+fn check_device_pixel_support() -> bool { + let global: JsValue = js_sys::global().into(); + let Ok(constructor) = js_sys::Reflect::get(&global, &"ResizeObserverEntry".into()) else { + return false; + }; + let Ok(prototype) = js_sys::Reflect::get(&constructor, &"prototype".into()) else { + return false; + }; + let descriptor = js_sys::Object::get_own_property_descriptor( + &prototype.unchecked_into::(), + &"devicePixelContentBoxSize".into(), + ); + !descriptor.is_undefined() +} + +impl raw_window_handle::HasWindowHandle for WebWindow { + fn window_handle( + &self, + ) -> Result, raw_window_handle::HandleError> { + let canvas_ref: &JsValue = self.inner.canvas.as_ref(); + let obj = std::ptr::NonNull::from(canvas_ref).cast::(); + let handle = raw_window_handle::WebCanvasWindowHandle::new(obj); + Ok(unsafe { raw_window_handle::WindowHandle::borrow_raw(handle.into()) }) + } +} + +impl raw_window_handle::HasDisplayHandle for WebWindow { + fn display_handle( + &self, + ) -> Result, raw_window_handle::HandleError> { + Ok(raw_window_handle::DisplayHandle::web()) + } +} + +impl PlatformWindow for WebWindow { + fn bounds(&self) -> Bounds { + self.inner.state.borrow().bounds + } + + fn is_maximized(&self) -> bool { + false + } + + fn window_bounds(&self) -> WindowBounds { + WindowBounds::Windowed(self.bounds()) + } + + fn content_size(&self) -> Size { + self.inner.state.borrow().bounds.size + } + + fn resize(&mut self, size: Size) { + let style = self.inner.canvas.style(); + style + .set_property("width", &format!("{}px", f32::from(size.width))) + .ok(); + style + .set_property("height", &format!("{}px", f32::from(size.height))) + .ok(); + } + + fn scale_factor(&self) -> f32 { + self.inner.state.borrow().scale_factor + } + + fn appearance(&self) -> WindowAppearance { + current_appearance(&self.inner.browser_window) + } + + fn display(&self) -> Option> { + Some(self.display.clone()) + } + + fn mouse_position(&self) -> Point { + self.inner.state.borrow().mouse_position + } + + 
fn modifiers(&self) -> Modifiers { + self.inner.state.borrow().modifiers + } + + fn capslock(&self) -> Capslock { + self.inner.state.borrow().capslock + } + + fn set_input_handler(&mut self, input_handler: PlatformInputHandler) { + self.inner.state.borrow_mut().input_handler = Some(input_handler); + } + + fn take_input_handler(&mut self) -> Option { + self.inner.state.borrow_mut().input_handler.take() + } + + fn prompt( + &self, + _level: PromptLevel, + _msg: &str, + _detail: Option<&str>, + _answers: &[PromptButton], + ) -> Option> { + None + } + + fn activate(&self) { + self.inner.state.borrow_mut().is_active = true; + } + + fn is_active(&self) -> bool { + self.inner.state.borrow().is_active + } + + fn is_hovered(&self) -> bool { + self.inner.state.borrow().is_hovered + } + + fn background_appearance(&self) -> WindowBackgroundAppearance { + WindowBackgroundAppearance::Opaque + } + + fn set_title(&mut self, title: &str) { + self.inner.state.borrow_mut().title = title.to_owned(); + if let Some(document) = self.inner.browser_window.document() { + document.set_title(title); + } + } + + fn set_background_appearance(&self, _background: WindowBackgroundAppearance) {} + + fn minimize(&self) { + log::warn!("WebWindow::minimize is not supported in the browser"); + } + + fn zoom(&self) { + log::warn!("WebWindow::zoom is not supported in the browser"); + } + + fn toggle_fullscreen(&self) { + let mut state = self.inner.state.borrow_mut(); + state.is_fullscreen = !state.is_fullscreen; + + if state.is_fullscreen { + let canvas: &web_sys::Element = self.inner.canvas.as_ref(); + canvas.request_fullscreen().ok(); + } else { + if let Some(document) = self.inner.browser_window.document() { + document.exit_fullscreen(); + } + } + } + + fn is_fullscreen(&self) -> bool { + self.inner.state.borrow().is_fullscreen + } + + fn on_request_frame(&self, callback: Box) { + self.inner.callbacks.borrow_mut().request_frame = Some(callback); + } + + fn on_input(&self, callback: Box 
DispatchEventResult>) { + self.inner.callbacks.borrow_mut().input = Some(callback); + } + + fn on_active_status_change(&self, callback: Box) { + self.inner.callbacks.borrow_mut().active_status_change = Some(callback); + } + + fn on_hover_status_change(&self, callback: Box) { + self.inner.callbacks.borrow_mut().hover_status_change = Some(callback); + } + + fn on_resize(&self, callback: Box, f32)>) { + self.inner.callbacks.borrow_mut().resize = Some(callback); + } + + fn on_moved(&self, callback: Box) { + self.inner.callbacks.borrow_mut().moved = Some(callback); + } + + fn on_should_close(&self, callback: Box bool>) { + self.inner.callbacks.borrow_mut().should_close = Some(callback); + } + + fn on_close(&self, callback: Box) { + self.inner.callbacks.borrow_mut().close = Some(callback); + } + + fn on_hit_test_window_control(&self, callback: Box Option>) { + self.inner.callbacks.borrow_mut().hit_test_window_control = Some(callback); + } + + fn on_appearance_changed(&self, callback: Box) { + self.inner.callbacks.borrow_mut().appearance_changed = Some(callback); + } + + fn draw(&self, scene: &Scene) { + if let Some((width, height)) = self.inner.pending_physical_size.take() { + if self.inner.canvas.width() != width || self.inner.canvas.height() != height { + self.inner.canvas.set_width(width); + self.inner.canvas.set_height(height); + } + + let mut state = self.inner.state.borrow_mut(); + state.renderer.update_drawable_size(Size { + width: DevicePixels(width as i32), + height: DevicePixels(height as i32), + }); + drop(state); + } + + self.inner.state.borrow_mut().renderer.draw(scene); + } + + fn completed_frame(&self) { + // On web, presentation happens automatically via wgpu surface present + } + + fn sprite_atlas(&self) -> Arc { + self.inner.state.borrow().renderer.sprite_atlas().clone() + } + + fn is_subpixel_rendering_supported(&self) -> bool { + self.inner + .state + .borrow() + .renderer + .supports_dual_source_blending() + } + + fn gpu_specs(&self) -> Option { + 
Some(self.inner.state.borrow().renderer.gpu_specs()) + } + + fn update_ime_position(&self, _bounds: Bounds) {} + + fn request_decorations(&self, _decorations: WindowDecorations) {} + + fn show_window_menu(&self, _position: Point) {} + + fn start_window_move(&self) {} + + fn start_window_resize(&self, _edge: ResizeEdge) {} + + fn window_decorations(&self) -> Decorations { + Decorations::Server + } + + fn set_app_id(&mut self, _app_id: &str) {} + + fn window_controls(&self) -> WindowControls { + WindowControls { + fullscreen: true, + maximize: false, + minimize: false, + window_menu: false, + } + } + + fn set_client_inset(&self, _inset: Pixels) {} +} diff --git a/crates/gpui_wgpu/Cargo.toml b/crates/gpui_wgpu/Cargo.toml index a3664fe59e9c51f5b9e68f63c67d30aa502bd737..c5c078088981803712e559f0a3e19c9f1ab850d5 100644 --- a/crates/gpui_wgpu/Cargo.toml +++ b/crates/gpui_wgpu/Cargo.toml @@ -11,16 +11,36 @@ workspace = true [lib] path = "src/gpui_wgpu.rs" -[target.'cfg(not(target_os = "windows"))'.dependencies] +[features] +default = [] +font-kit = ["dep:font-kit"] + +[dependencies] gpui.workspace = true anyhow.workspace = true bytemuck = "1" collections.workspace = true +cosmic-text = "0.17.0" etagere = "0.2" +itertools.workspace = true log.workspace = true parking_lot.workspace = true profiling.workspace = true raw-window-handle = "0.6" -smol.workspace = true -util.workspace = true +smallvec.workspace = true +swash = "0.2.6" +gpui_util.workspace = true wgpu.workspace = true + +# Optional: only needed on platforms with multiple font sources (e.g. 
Linux) +# WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io +font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true } + +[target.'cfg(not(target_family = "wasm"))'.dependencies] +pollster.workspace = true + +[target.'cfg(target_family = "wasm")'.dependencies] +wasm-bindgen.workspace = true +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3", features = ["HtmlCanvasElement"] } +js-sys = "0.3" \ No newline at end of file diff --git a/crates/gpui_wgpu/src/cosmic_text_system.rs b/crates/gpui_wgpu/src/cosmic_text_system.rs new file mode 100644 index 0000000000000000000000000000000000000000..c664ca9449ff211b2c094556c0f896dc71cdf574 --- /dev/null +++ b/crates/gpui_wgpu/src/cosmic_text_system.rs @@ -0,0 +1,645 @@ +use anyhow::{Context as _, Ok, Result}; +use collections::HashMap; +use cosmic_text::{ + Attrs, AttrsList, Family, Font as CosmicTextFont, FontFeatures as CosmicFontFeatures, + FontSystem, ShapeBuffer, ShapeLine, +}; +use gpui::{ + Bounds, DevicePixels, Font, FontFeatures, FontId, FontMetrics, FontRun, GlyphId, LineLayout, + Pixels, PlatformTextSystem, RenderGlyphParams, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, + ShapedGlyph, ShapedRun, SharedString, Size, TextRenderingMode, point, size, +}; + +use itertools::Itertools; +use parking_lot::RwLock; +use smallvec::SmallVec; +use std::{borrow::Cow, sync::Arc}; +use swash::{ + scale::{Render, ScaleContext, Source, StrikeWith}, + zeno::{Format, Vector}, +}; + +pub struct CosmicTextSystem(RwLock); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct FontKey { + family: SharedString, + features: FontFeatures, +} + +impl FontKey { + fn new(family: SharedString, features: FontFeatures) -> Self { + Self { family, features } + } +} + +struct CosmicTextSystemState { + font_system: FontSystem, + scratch: ShapeBuffer, + swash_scale_context: 
ScaleContext, + /// Contains all already loaded fonts, including all faces. Indexed by `FontId`. + loaded_fonts: Vec, + /// Caches the `FontId`s associated with a specific family to avoid iterating the font database + /// for every font face in a family. + font_ids_by_family_cache: HashMap>, + system_font_fallback: String, +} + +struct LoadedFont { + font: Arc, + features: CosmicFontFeatures, + is_known_emoji_font: bool, +} + +impl CosmicTextSystem { + pub fn new(system_font_fallback: &str) -> Self { + let font_system = FontSystem::new(); + + Self(RwLock::new(CosmicTextSystemState { + font_system, + scratch: ShapeBuffer::default(), + swash_scale_context: ScaleContext::new(), + loaded_fonts: Vec::new(), + font_ids_by_family_cache: HashMap::default(), + system_font_fallback: system_font_fallback.to_string(), + })) + } + + pub fn new_without_system_fonts(system_font_fallback: &str) -> Self { + let font_system = FontSystem::new_with_locale_and_db( + "en-US".to_string(), + cosmic_text::fontdb::Database::new(), + ); + + Self(RwLock::new(CosmicTextSystemState { + font_system, + scratch: ShapeBuffer::default(), + swash_scale_context: ScaleContext::new(), + loaded_fonts: Vec::new(), + font_ids_by_family_cache: HashMap::default(), + system_font_fallback: system_font_fallback.to_string(), + })) + } +} + +impl PlatformTextSystem for CosmicTextSystem { + fn add_fonts(&self, fonts: Vec>) -> Result<()> { + self.0.write().add_fonts(fonts) + } + + fn all_font_names(&self) -> Vec { + let mut result = self + .0 + .read() + .font_system + .db() + .faces() + .filter_map(|face| face.families.first().map(|family| family.0.clone())) + .collect_vec(); + result.sort(); + result.dedup(); + result + } + + fn font_id(&self, font: &Font) -> Result { + let mut state = self.0.write(); + let key = FontKey::new(font.family.clone(), font.features.clone()); + let candidates = if let Some(font_ids) = state.font_ids_by_family_cache.get(&key) { + font_ids.as_slice() + } else { + let font_ids = 
state.load_family(&font.family, &font.features)?; + state.font_ids_by_family_cache.insert(key.clone(), font_ids); + state.font_ids_by_family_cache[&key].as_ref() + }; + + let ix = find_best_match(font, candidates, &state)?; + + Ok(candidates[ix]) + } + + fn font_metrics(&self, font_id: FontId) -> FontMetrics { + let metrics = self + .0 + .read() + .loaded_font(font_id) + .font + .as_swash() + .metrics(&[]); + + FontMetrics { + units_per_em: metrics.units_per_em as u32, + ascent: metrics.ascent, + descent: -metrics.descent, + line_gap: metrics.leading, + underline_position: metrics.underline_offset, + underline_thickness: metrics.stroke_size, + cap_height: metrics.cap_height, + x_height: metrics.x_height, + bounding_box: Bounds { + origin: point(0.0, 0.0), + size: size(metrics.max_width, metrics.ascent + metrics.descent), + }, + } + } + + fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { + let lock = self.0.read(); + let glyph_metrics = lock.loaded_font(font_id).font.as_swash().glyph_metrics(&[]); + let glyph_id = glyph_id.0 as u16; + Ok(Bounds { + origin: point(0.0, 0.0), + size: size( + glyph_metrics.advance_width(glyph_id), + glyph_metrics.advance_height(glyph_id), + ), + }) + } + + fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { + self.0.read().advance(font_id, glyph_id) + } + + fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option { + self.0.read().glyph_for_char(font_id, ch) + } + + fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result> { + self.0.write().raster_bounds(params) + } + + fn rasterize_glyph( + &self, + params: &RenderGlyphParams, + raster_bounds: Bounds, + ) -> Result<(Size, Vec)> { + self.0.write().rasterize_glyph(params, raster_bounds) + } + + fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout { + self.0.write().layout_line(text, font_size, runs) + } + + fn recommended_rendering_mode( + &self, + _font_id: FontId, + _font_size: Pixels, + ) -> 
TextRenderingMode { + TextRenderingMode::Subpixel + } +} + +impl CosmicTextSystemState { + fn loaded_font(&self, font_id: FontId) -> &LoadedFont { + &self.loaded_fonts[font_id.0] + } + + #[profiling::function] + fn add_fonts(&mut self, fonts: Vec>) -> Result<()> { + let db = self.font_system.db_mut(); + for bytes in fonts { + match bytes { + Cow::Borrowed(embedded_font) => { + db.load_font_data(embedded_font.to_vec()); + } + Cow::Owned(bytes) => { + db.load_font_data(bytes); + } + } + } + Ok(()) + } + + #[profiling::function] + fn load_family( + &mut self, + name: &str, + features: &FontFeatures, + ) -> Result> { + let name = gpui::font_name_with_fallbacks(name, &self.system_font_fallback); + + let families = self + .font_system + .db() + .faces() + .filter(|face| face.families.iter().any(|family| *name == family.0)) + .map(|face| (face.id, face.post_script_name.clone())) + .collect::>(); + + let mut loaded_font_ids = SmallVec::new(); + for (font_id, postscript_name) in families { + let font = self + .font_system + .get_font(font_id, cosmic_text::Weight::NORMAL) + .context("Could not load font")?; + + // HACK: To let the storybook run and render Windows caption icons. We should actually do better font fallback. 
+ let allowed_bad_font_names = [ + "SegoeFluentIcons", // NOTE: Segoe fluent icons postscript name is inconsistent + "Segoe Fluent Icons", + ]; + + if font.as_swash().charmap().map('m') == 0 + && !allowed_bad_font_names.contains(&postscript_name.as_str()) + { + self.font_system.db_mut().remove_face(font.id()); + continue; + }; + + let font_id = FontId(self.loaded_fonts.len()); + loaded_font_ids.push(font_id); + self.loaded_fonts.push(LoadedFont { + font, + features: cosmic_font_features(features)?, + is_known_emoji_font: check_is_known_emoji_font(&postscript_name), + }); + } + + Ok(loaded_font_ids) + } + + fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result> { + let glyph_metrics = self.loaded_font(font_id).font.as_swash().glyph_metrics(&[]); + Ok(Size { + width: glyph_metrics.advance_width(glyph_id.0 as u16), + height: glyph_metrics.advance_height(glyph_id.0 as u16), + }) + } + + fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option { + let glyph_id = self.loaded_font(font_id).font.as_swash().charmap().map(ch); + if glyph_id == 0 { + None + } else { + Some(GlyphId(glyph_id.into())) + } + } + + fn raster_bounds(&mut self, params: &RenderGlyphParams) -> Result> { + let image = self.render_glyph_image(params)?; + Ok(Bounds { + origin: point(image.placement.left.into(), (-image.placement.top).into()), + size: size(image.placement.width.into(), image.placement.height.into()), + }) + } + + #[profiling::function] + fn rasterize_glyph( + &mut self, + params: &RenderGlyphParams, + glyph_bounds: Bounds, + ) -> Result<(Size, Vec)> { + if glyph_bounds.size.width.0 == 0 || glyph_bounds.size.height.0 == 0 { + anyhow::bail!("glyph bounds are empty"); + } + + let mut image = self.render_glyph_image(params)?; + let bitmap_size = glyph_bounds.size; + match image.content { + swash::scale::image::Content::Color | swash::scale::image::Content::SubpixelMask => { + // Convert from RGBA to BGRA. 
+ for pixel in image.data.chunks_exact_mut(4) { + pixel.swap(0, 2); + } + Ok((bitmap_size, image.data)) + } + swash::scale::image::Content::Mask => Ok((bitmap_size, image.data)), + } + } + + fn render_glyph_image( + &mut self, + params: &RenderGlyphParams, + ) -> Result { + let loaded_font = &self.loaded_fonts[params.font_id.0]; + let font_ref = loaded_font.font.as_swash(); + let pixel_size = f32::from(params.font_size); + + let subpixel_offset = Vector::new( + params.subpixel_variant.x as f32 / SUBPIXEL_VARIANTS_X as f32 / params.scale_factor, + params.subpixel_variant.y as f32 / SUBPIXEL_VARIANTS_Y as f32 / params.scale_factor, + ); + + let mut scaler = self + .swash_scale_context + .builder(font_ref) + .size(pixel_size * params.scale_factor) + .hint(true) + .build(); + + let sources: &[Source] = if params.is_emoji { + &[ + Source::ColorOutline(0), + Source::ColorBitmap(StrikeWith::BestFit), + Source::Outline, + ] + } else { + &[Source::Outline] + }; + + let mut renderer = Render::new(sources); + if params.subpixel_rendering { + // There seems to be a bug in Swash where the B and R values are swapped. + renderer + .format(Format::subpixel_bgra()) + .offset(subpixel_offset); + } else { + renderer.format(Format::Alpha).offset(subpixel_offset); + } + + let glyph_id: u16 = params.glyph_id.0.try_into()?; + renderer + .render(&mut scaler, glyph_id) + .with_context(|| format!("unable to render glyph via swash for {params:?}")) + } + + /// This is used when cosmic_text has chosen a fallback font instead of using the requested + /// font, typically to handle some unicode characters. When this happens, `loaded_fonts` may not + /// yet have an entry for this fallback font, and so one is added. + /// + /// Note that callers shouldn't use this `FontId` somewhere that will retrieve the corresponding + /// `LoadedFont.features`, as it will have an arbitrarily chosen or empty value. 
The only + /// current use of this field is for the *input* of `layout_line`, and so it's fine to use + /// `font_id_for_cosmic_id` when computing the *output* of `layout_line`. + fn font_id_for_cosmic_id(&mut self, id: cosmic_text::fontdb::ID) -> Result { + if let Some(ix) = self + .loaded_fonts + .iter() + .position(|loaded_font| loaded_font.font.id() == id) + { + Ok(FontId(ix)) + } else { + let font = self + .font_system + .get_font(id, cosmic_text::Weight::NORMAL) + .context("failed to get fallback font from cosmic-text font system")?; + let face = self + .font_system + .db() + .face(id) + .context("fallback font face not found in cosmic-text database")?; + + let font_id = FontId(self.loaded_fonts.len()); + self.loaded_fonts.push(LoadedFont { + font, + features: CosmicFontFeatures::new(), + is_known_emoji_font: check_is_known_emoji_font(&face.post_script_name), + }); + + Ok(font_id) + } + } + + #[profiling::function] + fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout { + let mut attrs_list = AttrsList::new(&Attrs::new()); + let mut offs = 0; + for run in font_runs { + let loaded_font = self.loaded_font(run.font_id); + let Some(face) = self.font_system.db().face(loaded_font.font.id()) else { + log::warn!( + "font face not found in database for font_id {:?}", + run.font_id + ); + offs += run.len; + continue; + }; + let Some(first_family) = face.families.first() else { + log::warn!( + "font face has no family names for font_id {:?}", + run.font_id + ); + offs += run.len; + continue; + }; + + attrs_list.add_span( + offs..(offs + run.len), + &Attrs::new() + .metadata(run.font_id.0) + .family(Family::Name(&first_family.0)) + .stretch(face.stretch) + .style(face.style) + .weight(face.weight) + .font_features(loaded_font.features.clone()), + ); + offs += run.len; + } + + let line = ShapeLine::new( + &mut self.font_system, + text, + &attrs_list, + cosmic_text::Shaping::Advanced, + 4, + ); + let mut layout_lines = 
Vec::with_capacity(1); + line.layout_to_buffer( + &mut self.scratch, + f32::from(font_size), + None, // We do our own wrapping + cosmic_text::Wrap::None, + None, + &mut layout_lines, + None, + cosmic_text::Hinting::Disabled, + ); + + let Some(layout) = layout_lines.first() else { + return LineLayout { + font_size, + width: Pixels::ZERO, + ascent: Pixels::ZERO, + descent: Pixels::ZERO, + runs: Vec::new(), + len: text.len(), + }; + }; + + let mut runs: Vec = Vec::new(); + for glyph in &layout.glyphs { + let mut font_id = FontId(glyph.metadata); + let mut loaded_font = self.loaded_font(font_id); + if loaded_font.font.id() != glyph.font_id { + match self.font_id_for_cosmic_id(glyph.font_id) { + std::result::Result::Ok(resolved_id) => { + font_id = resolved_id; + loaded_font = self.loaded_font(font_id); + } + Err(error) => { + log::warn!( + "failed to resolve cosmic font id {:?}: {error:#}", + glyph.font_id + ); + continue; + } + } + } + let is_emoji = loaded_font.is_known_emoji_font; + + // HACK: Prevent crash caused by variation selectors. 
+ if glyph.glyph_id == 3 && is_emoji { + continue; + } + + let shaped_glyph = ShapedGlyph { + id: GlyphId(glyph.glyph_id as u32), + position: point(glyph.x.into(), glyph.y.into()), + index: glyph.start, + is_emoji, + }; + + if let Some(last_run) = runs + .last_mut() + .filter(|last_run| last_run.font_id == font_id) + { + last_run.glyphs.push(shaped_glyph); + } else { + runs.push(ShapedRun { + font_id, + glyphs: vec![shaped_glyph], + }); + } + } + + LineLayout { + font_size, + width: layout.w.into(), + ascent: layout.max_ascent.into(), + descent: layout.max_descent.into(), + runs, + len: text.len(), + } + } +} + +#[cfg(feature = "font-kit")] +fn find_best_match( + font: &Font, + candidates: &[FontId], + state: &CosmicTextSystemState, +) -> Result { + let candidate_properties = candidates + .iter() + .map(|font_id| { + let database_id = state.loaded_font(*font_id).font.id(); + let face_info = state + .font_system + .db() + .face(database_id) + .context("font face not found in database")?; + Ok(face_info_into_properties(face_info)) + }) + .collect::>>()?; + + let ix = + font_kit::matching::find_best_match(&candidate_properties, &font_into_properties(font)) + .context("requested font family contains no font matching the other parameters")?; + + Ok(ix) +} + +#[cfg(not(feature = "font-kit"))] +fn find_best_match( + font: &Font, + candidates: &[FontId], + state: &CosmicTextSystemState, +) -> Result { + if candidates.is_empty() { + anyhow::bail!("requested font family contains no font matching the other parameters"); + } + if candidates.len() == 1 { + return Ok(0); + } + + let target_weight = font.weight.0; + let target_italic = matches!( + font.style, + gpui::FontStyle::Italic | gpui::FontStyle::Oblique + ); + + let mut best_index = 0; + let mut best_score = u32::MAX; + + for (index, font_id) in candidates.iter().enumerate() { + let database_id = state.loaded_font(*font_id).font.id(); + let face_info = state + .font_system + .db() + .face(database_id) + .context("font 
face not found in database")?; + + let is_italic = matches!( + face_info.style, + cosmic_text::Style::Italic | cosmic_text::Style::Oblique + ); + let style_penalty: u32 = if is_italic == target_italic { 0 } else { 1000 }; + let weight_diff = (face_info.weight.0 as i32 - target_weight as i32).unsigned_abs(); + let score = style_penalty + weight_diff; + + if score < best_score { + best_score = score; + best_index = index; + } + } + + Ok(best_index) +} + +fn cosmic_font_features(features: &FontFeatures) -> Result { + let mut result = CosmicFontFeatures::new(); + for feature in features.0.iter() { + let name_bytes: [u8; 4] = feature + .0 + .as_bytes() + .try_into() + .context("Incorrect feature flag format")?; + + let tag = cosmic_text::FeatureTag::new(&name_bytes); + + result.set(tag, feature.1); + } + Ok(result) +} + +#[cfg(feature = "font-kit")] +fn font_into_properties(font: &gpui::Font) -> font_kit::properties::Properties { + font_kit::properties::Properties { + style: match font.style { + gpui::FontStyle::Normal => font_kit::properties::Style::Normal, + gpui::FontStyle::Italic => font_kit::properties::Style::Italic, + gpui::FontStyle::Oblique => font_kit::properties::Style::Oblique, + }, + weight: font_kit::properties::Weight(font.weight.0), + stretch: Default::default(), + } +} + +#[cfg(feature = "font-kit")] +fn face_info_into_properties( + face_info: &cosmic_text::fontdb::FaceInfo, +) -> font_kit::properties::Properties { + font_kit::properties::Properties { + style: match face_info.style { + cosmic_text::Style::Normal => font_kit::properties::Style::Normal, + cosmic_text::Style::Italic => font_kit::properties::Style::Italic, + cosmic_text::Style::Oblique => font_kit::properties::Style::Oblique, + }, + weight: font_kit::properties::Weight(face_info.weight.0.into()), + stretch: match face_info.stretch { + cosmic_text::Stretch::Condensed => font_kit::properties::Stretch::CONDENSED, + cosmic_text::Stretch::Expanded => font_kit::properties::Stretch::EXPANDED, + 
cosmic_text::Stretch::ExtraCondensed => font_kit::properties::Stretch::EXTRA_CONDENSED, + cosmic_text::Stretch::ExtraExpanded => font_kit::properties::Stretch::EXTRA_EXPANDED, + cosmic_text::Stretch::Normal => font_kit::properties::Stretch::NORMAL, + cosmic_text::Stretch::SemiCondensed => font_kit::properties::Stretch::SEMI_CONDENSED, + cosmic_text::Stretch::SemiExpanded => font_kit::properties::Stretch::SEMI_EXPANDED, + cosmic_text::Stretch::UltraCondensed => font_kit::properties::Stretch::ULTRA_CONDENSED, + cosmic_text::Stretch::UltraExpanded => font_kit::properties::Stretch::ULTRA_EXPANDED, + }, + } +} + +fn check_is_known_emoji_font(postscript_name: &str) -> bool { + // TODO: Include other common emoji fonts + postscript_name == "NotoColorEmoji" +} diff --git a/crates/gpui_wgpu/src/gpui_wgpu.rs b/crates/gpui_wgpu/src/gpui_wgpu.rs index 8a1eb576ed01475973e92f37b05ca3ee393daf66..a306a9d4cac2251a46cd1115462bdcbe4b368759 100644 --- a/crates/gpui_wgpu/src/gpui_wgpu.rs +++ b/crates/gpui_wgpu/src/gpui_wgpu.rs @@ -1,8 +1,9 @@ -#![cfg(not(target_os = "windows"))] +mod cosmic_text_system; mod wgpu_atlas; mod wgpu_context; mod wgpu_renderer; +pub use cosmic_text_system::*; pub use wgpu_atlas::*; pub use wgpu_context::*; pub use wgpu_renderer::*; diff --git a/crates/gpui_wgpu/src/shaders.wgsl b/crates/gpui_wgpu/src/shaders.wgsl index 58e9de109e6602d999433aa9b42d3b80d06ca4ad..12ce7d29b0b81603b7b051f733e905ccd9111d9d 100644 --- a/crates/gpui_wgpu/src/shaders.wgsl +++ b/crates/gpui_wgpu/src/shaders.wgsl @@ -1,4 +1,3 @@ -enable dual_source_blending; /* Functions useful for debugging: // A heat map color for debugging (blue -> cyan -> green -> yellow -> red). 
@@ -501,11 +500,11 @@ fn gradient_color(background: Background, position: vec2, bounds: Bounds, // checkerboard let size = background.gradient_angle_or_pattern_height; let relative_position = position - bounds.origin; - + let x_index = floor(relative_position.x / size); let y_index = floor(relative_position.y / size); let should_be_colored = (x_index + y_index) % 2.0; - + background_color = solid_color; background_color.a *= saturate(should_be_colored); } @@ -1033,7 +1032,7 @@ struct PathRasterizationVertex { struct PathRasterizationVarying { @builtin(position) position: vec4, @location(0) st_position: vec2, - @location(1) vertex_id: u32, + @location(1) @interpolate(flat) vertex_id: u32, //TODO: use `clip_distance` once Naga supports it @location(3) clip_distances: vec4, } @@ -1072,14 +1071,14 @@ fn fs_path_rasterization(input: PathRasterizationVarying) -> @location(0) vec4(color.rgb * color.a * alpha, color.a * alpha); } @@ -1334,57 +1333,3 @@ fn fs_surface(input: SurfaceVarying) -> @location(0) vec4 { return ycbcr_to_RGB * y_cb_cr; } - -// --- subpixel sprites --- // - -struct SubpixelSprite { - order: u32, - pad: u32, - bounds: Bounds, - content_mask: Bounds, - color: Hsla, - tile: AtlasTile, - transformation: TransformationMatrix, -} -@group(1) @binding(0) var b_subpixel_sprites: array; - -struct SubpixelSpriteOutput { - @builtin(position) position: vec4, - @location(0) tile_position: vec2, - @location(1) @interpolate(flat) color: vec4, - @location(3) clip_distances: vec4, -} - -struct SubpixelSpriteFragmentOutput { - @location(0) @blend_src(0) foreground: vec4, - @location(0) @blend_src(1) alpha: vec4, -} - -@vertex -fn vs_subpixel_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) instance_id: u32) -> SubpixelSpriteOutput { - let unit_vertex = vec2(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u)); - let sprite = b_subpixel_sprites[instance_id]; - - var out = SubpixelSpriteOutput(); - out.position = 
to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation); - out.tile_position = to_tile_position(unit_vertex, sprite.tile); - out.color = hsla_to_rgba(sprite.color); - out.clip_distances = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation); - return out; -} - -@fragment -fn fs_subpixel_sprite(input: SubpixelSpriteOutput) -> SubpixelSpriteFragmentOutput { - let sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb; - let alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, gamma_params.subpixel_enhanced_contrast, gamma_params.gamma_ratios); - - // Alpha clip after using the derivatives. - if (any(input.clip_distances < vec4(0.0))) { - return SubpixelSpriteFragmentOutput(vec4(0.0), vec4(0.0)); - } - - var out = SubpixelSpriteFragmentOutput(); - out.foreground = vec4(input.color.rgb, 1.0); - out.alpha = vec4(input.color.a * alpha_corrected, 1.0); - return out; -} diff --git a/crates/gpui_wgpu/src/shaders_subpixel.wgsl b/crates/gpui_wgpu/src/shaders_subpixel.wgsl new file mode 100644 index 0000000000000000000000000000000000000000..7acbd2e3d2e68ebdab349178b2918c564a35e4a3 --- /dev/null +++ b/crates/gpui_wgpu/src/shaders_subpixel.wgsl @@ -0,0 +1,53 @@ +// --- subpixel sprites --- // + +struct SubpixelSprite { + order: u32, + pad: u32, + bounds: Bounds, + content_mask: Bounds, + color: Hsla, + tile: AtlasTile, + transformation: TransformationMatrix, +} +@group(1) @binding(0) var b_subpixel_sprites: array; + +struct SubpixelSpriteOutput { + @builtin(position) position: vec4, + @location(0) tile_position: vec2, + @location(1) @interpolate(flat) color: vec4, + @location(3) clip_distances: vec4, +} + +struct SubpixelSpriteFragmentOutput { + @location(0) @blend_src(0) foreground: vec4, + @location(0) @blend_src(1) alpha: vec4, +} + +@vertex +fn vs_subpixel_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) instance_id: u32) -> 
SubpixelSpriteOutput { + let unit_vertex = vec2(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u)); + let sprite = b_subpixel_sprites[instance_id]; + + var out = SubpixelSpriteOutput(); + out.position = to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation); + out.tile_position = to_tile_position(unit_vertex, sprite.tile); + out.color = hsla_to_rgba(sprite.color); + out.clip_distances = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation); + return out; +} + +@fragment +fn fs_subpixel_sprite(input: SubpixelSpriteOutput) -> SubpixelSpriteFragmentOutput { + let sample = textureSample(t_sprite, s_sprite, input.tile_position).rgb; + let alpha_corrected = apply_contrast_and_gamma_correction3(sample, input.color.rgb, gamma_params.subpixel_enhanced_contrast, gamma_params.gamma_ratios); + + // Alpha clip after using the derivatives. + if (any(input.clip_distances < vec4(0.0))) { + return SubpixelSpriteFragmentOutput(vec4(0.0), vec4(0.0)); + } + + var out = SubpixelSpriteFragmentOutput(); + out.foreground = vec4(input.color.rgb, 1.0); + out.alpha = vec4(input.color.a * alpha_corrected, 1.0); + return out; +} diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index d3614ea126e3d3f7c4e83b645b0d4ac0d77e548e..ffef3a65398c3f03639a8551506463f91a862c33 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -1,4 +1,4 @@ -use anyhow::Result; +use anyhow::{Context as _, Result}; use collections::FxHashMap; use etagere::{BucketedAtlasAllocator, size2}; use gpui::{ @@ -30,6 +30,7 @@ struct PendingUpload { struct WgpuAtlasState { device: Arc, queue: Arc, + max_texture_size: u32, storage: WgpuAtlasStorage, tiles_by_key: FxHashMap, pending_uploads: Vec, @@ -41,9 +42,11 @@ pub struct WgpuTextureInfo { impl WgpuAtlas { pub fn new(device: Arc, queue: Arc) -> Self { + let max_texture_size = device.limits().max_texture_dimension_2d; 
WgpuAtlas(Mutex::new(WgpuAtlasState { device, queue, + max_texture_size, storage: WgpuAtlasStorage::default(), tiles_by_key: Default::default(), pending_uploads: Vec::new(), @@ -78,7 +81,9 @@ impl PlatformAtlas for WgpuAtlas { let Some((size, bytes)) = build()? else { return Ok(None); }; - let tile = lock.allocate(size, key.texture_kind()); + let tile = lock + .allocate(size, key.texture_kind()) + .context("failed to allocate")?; lock.upload_texture(tile.texture_id, tile.bounds, &bytes); lock.tiles_by_key.insert(key.clone(), tile.clone()); Ok(Some(tile)) @@ -110,7 +115,11 @@ impl PlatformAtlas for WgpuAtlas { } impl WgpuAtlasState { - fn allocate(&mut self, size: Size, texture_kind: AtlasTextureKind) -> AtlasTile { + fn allocate( + &mut self, + size: Size, + texture_kind: AtlasTextureKind, + ) -> Option { { let textures = &mut self.storage[texture_kind]; @@ -119,14 +128,12 @@ impl WgpuAtlasState { .rev() .find_map(|texture| texture.allocate(size)) { - return tile; + return Some(tile); } } let texture = self.push_texture(size, texture_kind); - texture - .allocate(size) - .expect("Failed to allocate from newly created texture") + texture.allocate(size) } fn push_texture( @@ -138,8 +145,13 @@ impl WgpuAtlasState { width: DevicePixels(1024), height: DevicePixels(1024), }; + let max_texture_size = self.max_texture_size as i32; + let max_atlas_size = Size { + width: DevicePixels(max_texture_size), + height: DevicePixels(max_texture_size), + }; - let size = min_size.max(&DEFAULT_ATLAS_SIZE); + let size = min_size.min(&max_atlas_size).max(&DEFAULT_ATLAS_SIZE); let format = match kind { AtlasTextureKind::Monochrome => wgpu::TextureFormat::R8Unorm, AtlasTextureKind::Subpixel => wgpu::TextureFormat::Bgra8Unorm, diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 270201183c8afd33534c184b7dc597ed6ab7d9d5..b7883a6910261da8dc3f1df6414c5e38e1c46cd2 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ 
b/crates/gpui_wgpu/src/wgpu_context.rs @@ -1,6 +1,8 @@ +#[cfg(not(target_family = "wasm"))] use anyhow::Context as _; +#[cfg(not(target_family = "wasm"))] +use gpui_util::ResultExt; use std::sync::Arc; -use util::ResultExt; pub struct WgpuContext { pub instance: wgpu::Instance, @@ -10,8 +12,19 @@ pub struct WgpuContext { dual_source_blending: bool, } +#[cfg(not(target_family = "wasm"))] +pub struct CompositorGpuHint { + pub vendor_id: u32, + pub device_id: u32, +} + impl WgpuContext { - pub fn new(instance: wgpu::Instance, surface: &wgpu::Surface<'_>) -> anyhow::Result { + #[cfg(not(target_family = "wasm"))] + pub fn new( + instance: wgpu::Instance, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option, + ) -> anyhow::Result { let device_id_filter = match std::env::var("ZED_DEVICE_ID") { Ok(val) => parse_pci_id(&val) .context("Failed to parse device ID from `ZED_DEVICE_ID` environment variable") @@ -24,24 +37,48 @@ impl WgpuContext { } }; - let adapter = smol::block_on(Self::select_adapter( - &instance, - device_id_filter, - Some(surface), - ))?; + // Select an adapter by actually testing surface configuration with the real device. + // This is the only reliable way to determine compatibility on hybrid GPU systems. + let (adapter, device, queue, dual_source_blending) = + pollster::block_on(Self::select_adapter_and_device( + &instance, + device_id_filter, + surface, + compositor_gpu.as_ref(), + ))?; - let caps = surface.get_capabilities(&adapter); - if caps.formats.is_empty() { - let info = adapter.get_info(); - anyhow::bail!( - "No adapter compatible with the display surface could be found. 
\ - Best candidate {:?} (backend={:?}, device={:#06x}) reports no \ - supported surface formats.", - info.name, - info.backend, - info.device, - ); - } + log::info!( + "Selected GPU adapter: {:?} ({:?})", + adapter.get_info().name, + adapter.get_info().backend + ); + + Ok(Self { + instance, + adapter, + device: Arc::new(device), + queue: Arc::new(queue), + dual_source_blending, + }) + } + + #[cfg(target_family = "wasm")] + pub async fn new_web() -> anyhow::Result { + let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor { + backends: wgpu::Backends::BROWSER_WEBGPU | wgpu::Backends::GL, + flags: wgpu::InstanceFlags::default(), + backend_options: wgpu::BackendOptions::default(), + memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + }); + + let adapter = instance + .request_adapter(&wgpu::RequestAdapterOptions { + power_preference: wgpu::PowerPreference::HighPerformance, + compatible_surface: None, + force_fallback_adapter: false, + }) + .await + .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}"))?; log::info!( "Selected GPU adapter: {:?} ({:?})", @@ -49,7 +86,7 @@ impl WgpuContext { adapter.get_info().backend ); - let (device, queue, dual_source_blending) = Self::create_device(&adapter)?; + let (device, queue, dual_source_blending) = Self::create_device(&adapter).await?; Ok(Self { instance, @@ -60,6 +97,41 @@ impl WgpuContext { }) } + async fn create_device( + adapter: &wgpu::Adapter, + ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { + let dual_source_blending = adapter + .features() + .contains(wgpu::Features::DUAL_SOURCE_BLENDING); + + let mut required_features = wgpu::Features::empty(); + if dual_source_blending { + required_features |= wgpu::Features::DUAL_SOURCE_BLENDING; + } else { + log::warn!( + "Dual-source blending not available on this GPU. \ + Subpixel text antialiasing will be disabled." 
+ ); + } + + let (device, queue) = adapter + .request_device(&wgpu::DeviceDescriptor { + label: Some("gpui_device"), + required_features, + required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), + memory_hints: wgpu::MemoryHints::MemoryUsage, + trace: wgpu::Trace::Off, + experimental_features: wgpu::ExperimentalFeatures::disabled(), + }) + .await + .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?; + + Ok((device, queue, dual_source_blending)) + } + + #[cfg(not(target_family = "wasm"))] pub fn instance() -> wgpu::Instance { wgpu::Instance::new(&wgpu::InstanceDescriptor { backends: wgpu::Backends::VULKAN | wgpu::Backends::GL, @@ -84,97 +156,165 @@ impl WgpuContext { Ok(()) } - fn create_device(adapter: &wgpu::Adapter) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { - let dual_source_blending_available = adapter - .features() - .contains(wgpu::Features::DUAL_SOURCE_BLENDING); + /// Select an adapter and create a device, testing that the surface can actually be configured. + /// This is the only reliable way to determine compatibility on hybrid GPU systems, where + /// adapters may report surface compatibility via get_capabilities() but fail when actually + /// configuring (e.g., NVIDIA reporting Vulkan Wayland support but failing because the + /// Wayland compositor runs on the Intel GPU). 
+ #[cfg(not(target_family = "wasm"))] + async fn select_adapter_and_device( + instance: &wgpu::Instance, + device_id_filter: Option, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option<&CompositorGpuHint>, + ) -> anyhow::Result<(wgpu::Adapter, wgpu::Device, wgpu::Queue, bool)> { + let mut adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; - let mut required_features = wgpu::Features::empty(); - if dual_source_blending_available { - required_features |= wgpu::Features::DUAL_SOURCE_BLENDING; - } else { - log::warn!( - "Dual-source blending not available on this GPU. \ - Subpixel text antialiasing will be disabled." - ); + if adapters.is_empty() { + anyhow::bail!("No GPU adapters found"); } - let (device, queue) = smol::block_on(adapter.request_device(&wgpu::DeviceDescriptor { - label: Some("gpui_device"), - required_features, - required_limits: wgpu::Limits::default(), - memory_hints: wgpu::MemoryHints::MemoryUsage, - trace: wgpu::Trace::Off, - experimental_features: wgpu::ExperimentalFeatures::disabled(), - })) - .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?; - - Ok((device, queue, dual_source_blending_available)) - } - - async fn select_adapter( - instance: &wgpu::Instance, - device_id_filter: Option, - compatible_surface: Option<&wgpu::Surface<'_>>, - ) -> anyhow::Result { if let Some(device_id) = device_id_filter { - let adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; + log::info!("ZED_DEVICE_ID filter: {:#06x}", device_id); + } - if adapters.is_empty() { - anyhow::bail!("No GPU adapters found"); - } + // Sort adapters into a single priority order. Tiers (from highest to lowest): + // + // 1. ZED_DEVICE_ID match — explicit user override + // 2. Compositor GPU match — the GPU the display server is rendering on + // 3. Device type — WGPU HighPerformance order (Discrete > Integrated > + // Other > Virtual > Cpu). 
"Other" ranks above "Virtual" because + // backends like OpenGL may report real hardware as "Other". + // 4. Backend — prefer Vulkan/Metal/Dx12 over GL/etc. + adapters.sort_by_key(|adapter| { + let info = adapter.get_info(); + + // Backends like OpenGL report device=0 for all adapters, so + // device-based matching is only meaningful when non-zero. + let device_known = info.device != 0; + + let user_override: u8 = match device_id_filter { + Some(id) if device_known && info.device == id => 0, + _ => 1, + }; + + let compositor_match: u8 = match compositor_gpu { + Some(hint) + if device_known + && info.vendor == hint.vendor_id + && info.device == hint.device_id => + { + 0 + } + _ => 1, + }; + + let type_priority: u8 = match info.device_type { + wgpu::DeviceType::DiscreteGpu => 0, + wgpu::DeviceType::IntegratedGpu => 1, + wgpu::DeviceType::Other => 2, + wgpu::DeviceType::VirtualGpu => 3, + wgpu::DeviceType::Cpu => 4, + }; + + let backend_priority: u8 = match info.backend { + wgpu::Backend::Vulkan => 0, + wgpu::Backend::Metal => 0, + wgpu::Backend::Dx12 => 0, + _ => 1, + }; + + ( + user_override, + compositor_match, + type_priority, + backend_priority, + ) + }); + + // Log all available adapters (in sorted order) + log::info!("Found {} GPU adapter(s):", adapters.len()); + for adapter in &adapters { + let info = adapter.get_info(); + log::info!( + " - {} (vendor={:#06x}, device={:#06x}, backend={:?}, type={:?})", + info.name, + info.vendor, + info.device, + info.backend, + info.device_type, + ); + } - let mut non_matching_adapter_infos: Vec = Vec::new(); - - for adapter in adapters.into_iter() { - let info = adapter.get_info(); - if info.device == device_id { - if let Some(surface) = compatible_surface { - let caps = surface.get_capabilities(&adapter); - if caps.formats.is_empty() { - log::warn!( - "GPU matching ZED_DEVICE_ID={:#06x} ({}) is not compatible \ - with the display surface. 
Falling back to auto-selection.", - device_id, - info.name, - ); - break; - } - } + // Test each adapter by creating a device and configuring the surface + for adapter in adapters { + let info = adapter.get_info(); + log::info!("Testing adapter: {} ({:?})...", info.name, info.backend); + + match Self::try_adapter_with_surface(&adapter, surface).await { + Ok((device, queue, dual_source_blending)) => { log::info!( - "Found GPU matching ZED_DEVICE_ID={:#06x}: {}", - device_id, - info.name + "Selected GPU (passed configuration test): {} ({:?})", + info.name, + info.backend + ); + return Ok((adapter, device, queue, dual_source_blending)); + } + Err(e) => { + log::info!( + " Adapter {} ({:?}) failed: {}, trying next...", + info.name, + info.backend, + e ); - return Ok(adapter); - } else { - non_matching_adapter_infos.push(info); } } + } - log::warn!( - "No compatible GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:", - device_id - ); + anyhow::bail!("No GPU adapter found that can configure the display surface") + } - for info in &non_matching_adapter_infos { - log::warn!( - " - {} (device_id={:#06x}, backend={})", - info.name, - info.device, - info.backend - ); - } + /// Try to use an adapter with a surface by creating a device and testing configuration. + /// Returns the device and queue if successful, allowing them to be reused. 
+ #[cfg(not(target_family = "wasm"))] + async fn try_adapter_with_surface( + adapter: &wgpu::Adapter, + surface: &wgpu::Surface<'_>, + ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { + let caps = surface.get_capabilities(adapter); + if caps.formats.is_empty() { + anyhow::bail!("no compatible surface formats"); + } + if caps.alpha_modes.is_empty() { + anyhow::bail!("no compatible alpha modes"); } - instance - .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::None, - compatible_surface, - force_fallback_adapter: false, - }) - .await - .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}")) + // Create the real device with full features + let (device, queue, dual_source_blending) = Self::create_device(adapter).await?; + + // Use an error scope to capture any validation errors during configure + let error_scope = device.push_error_scope(wgpu::ErrorFilter::Validation); + + let test_config = wgpu::SurfaceConfiguration { + usage: wgpu::TextureUsages::RENDER_ATTACHMENT, + format: caps.formats[0], + width: 64, + height: 64, + present_mode: wgpu::PresentMode::Fifo, + desired_maximum_frame_latency: 2, + alpha_mode: caps.alpha_modes[0], + view_formats: vec![], + }; + + surface.configure(&device, &test_config); + + // Check if there was a validation error + let error = error_scope.pop().await; + if let Some(e) = error { + anyhow::bail!("surface configuration failed: {e}"); + } + + Ok((device, queue, dual_source_blending)) } pub fn supports_dual_source_blending(&self) -> bool { @@ -182,6 +322,7 @@ impl WgpuContext { } } +#[cfg(not(target_family = "wasm"))] fn parse_pci_id(id: &str) -> anyhow::Result { let mut id = id.trim(); diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 95d64d952373f303c1015669ee90a93b5d179dd5..2fd83b7b065e7ce4fe0ba9ec017f39264a33bee3 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1,3 +1,5 @@ 
+#[cfg(not(target_family = "wasm"))] +use crate::CompositorGpuHint; use crate::{WgpuAtlas, WgpuContext}; use bytemuck::{Pod, Zeroable}; use gpui::{ @@ -5,9 +7,11 @@ use gpui::{ PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, SubpixelSprite, Underline, get_gamma_correction_ratios, }; +use log::warn; +#[cfg(not(target_family = "wasm"))] use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use std::num::NonZeroU64; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] @@ -105,9 +109,10 @@ pub struct WgpuRenderer { path_globals_bind_group: wgpu::BindGroup, instance_buffer: wgpu::Buffer, instance_buffer_capacity: u64, + max_buffer_size: u64, storage_buffer_alignment: u64, - path_intermediate_texture: wgpu::Texture, - path_intermediate_view: wgpu::TextureView, + path_intermediate_texture: Option, + path_intermediate_view: Option, path_msaa_texture: Option, path_msaa_view: Option, rendering_params: RenderingParameters, @@ -115,6 +120,9 @@ pub struct WgpuRenderer { adapter_info: wgpu::AdapterInfo, transparent_alpha_mode: wgpu::CompositeAlphaMode, opaque_alpha_mode: wgpu::CompositeAlphaMode, + max_texture_size: u32, + last_error: Arc>>, + failed_frame_count: u32, } impl WgpuRenderer { @@ -123,10 +131,12 @@ impl WgpuRenderer { /// # Safety /// The caller must ensure that the window handle remains valid for the lifetime /// of the returned renderer. 
+ #[cfg(not(target_family = "wasm"))] pub fn new( gpu_context: &mut Option, window: &W, config: WgpuSurfaceConfig, + compositor_gpu: Option, ) -> anyhow::Result { let window_handle = window .window_handle() @@ -162,9 +172,30 @@ impl WgpuRenderer { context.check_compatible_with_surface(&surface)?; context } - None => gpu_context.insert(WgpuContext::new(instance, &surface)?), + None => gpu_context.insert(WgpuContext::new(instance, &surface, compositor_gpu)?), }; + Self::new_with_surface(context, surface, config) + } + + #[cfg(target_family = "wasm")] + pub fn new_from_canvas( + context: &WgpuContext, + canvas: &web_sys::HtmlCanvasElement, + config: WgpuSurfaceConfig, + ) -> anyhow::Result { + let surface = context + .instance + .create_surface(wgpu::SurfaceTarget::Canvas(canvas.clone())) + .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))?; + Self::new_with_surface(context, surface, config) + } + + fn new_with_surface( + context: &WgpuContext, + surface: wgpu::Surface<'static>, + config: WgpuSurfaceConfig, + ) -> anyhow::Result { let surface_caps = surface.get_capabilities(&context.adapter); let preferred_formats = [ wgpu::TextureFormat::Bgra8Unorm, @@ -214,19 +245,36 @@ impl WgpuRenderer { opaque_alpha_mode }; + let device = Arc::clone(&context.device); + let max_texture_size = device.limits().max_texture_dimension_2d; + + let requested_width = config.size.width.0 as u32; + let requested_height = config.size.height.0 as u32; + let clamped_width = requested_width.min(max_texture_size); + let clamped_height = requested_height.min(max_texture_size); + + if clamped_width != requested_width || clamped_height != requested_height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + requested_width, requested_height, max_texture_size, clamped_width, clamped_height + ); + } + let surface_config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: surface_format, - width: config.size.width.0 as u32, - height: config.size.height.0 as u32, + width: clamped_width.max(1), + height: clamped_height.max(1), present_mode: wgpu::PresentMode::Fifo, desired_maximum_frame_latency: 2, alpha_mode, view_formats: vec![], }; + // Configure the surface immediately. The adapter selection process already validated + // that this adapter can successfully configure this surface. surface.configure(&context.device, &surface_config); - let device = Arc::clone(&context.device); let queue = Arc::clone(&context.queue); let dual_source_blending = context.supports_dual_source_blending(); @@ -262,6 +310,7 @@ impl WgpuRenderer { mapped_at_creation: false, }); + let max_buffer_size = device.limits().max_buffer_size; let storage_buffer_alignment = device.limits().min_storage_buffer_offset_alignment as u64; let initial_instance_buffer_capacity = 2 * 1024 * 1024; let instance_buffer = device.create_buffer(&wgpu::BufferDescriptor { @@ -271,23 +320,6 @@ impl WgpuRenderer { mapped_at_creation: false, }); - let (path_intermediate_texture, path_intermediate_view) = Self::create_path_intermediate( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - ); - - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - let globals_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { label: Some("globals_bind_group"), layout: &bind_group_layouts.globals, @@ -336,6 +368,13 @@ impl WgpuRenderer { let adapter_info = context.adapter.get_info(); + let 
last_error: Arc>> = Arc::new(Mutex::new(None)); + let last_error_clone = Arc::clone(&last_error); + device.on_uncaptured_error(Arc::new(move |error| { + let mut guard = last_error_clone.lock().unwrap(); + *guard = Some(error.to_string()); + })); + Ok(Self { device, queue, @@ -352,16 +391,22 @@ impl WgpuRenderer { path_globals_bind_group, instance_buffer, instance_buffer_capacity: initial_instance_buffer_capacity, + max_buffer_size, storage_buffer_alignment, - path_intermediate_texture, - path_intermediate_view, - path_msaa_texture, - path_msaa_view, + // Defer intermediate texture creation to first draw call via ensure_intermediate_textures(). + // This avoids panics when the device/surface is in an invalid state during initialization. + path_intermediate_texture: None, + path_intermediate_view: None, + path_msaa_texture: None, + path_msaa_view: None, rendering_params, dual_source_blending, adapter_info, transparent_alpha_mode, opaque_alpha_mode, + max_texture_size, + last_error, + failed_frame_count: 0, }) } @@ -497,12 +542,25 @@ impl WgpuRenderer { path_sample_count: u32, dual_source_blending: bool, ) -> WgpuPipelines { - let shader_source = include_str!("shaders.wgsl"); + let base_shader_source = include_str!("shaders.wgsl"); let shader_module = device.create_shader_module(wgpu::ShaderModuleDescriptor { label: Some("gpui_shaders"), - source: wgpu::ShaderSource::Wgsl(shader_source.into()), + source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(base_shader_source)), }); + let subpixel_shader_source = include_str!("shaders_subpixel.wgsl"); + let subpixel_shader_module = if dual_source_blending { + let combined = format!( + "enable dual_source_blending;\n{base_shader_source}\n{subpixel_shader_source}" + ); + Some(device.create_shader_module(wgpu::ShaderModuleDescriptor { + label: Some("gpui_subpixel_shaders"), + source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Owned(combined)), + })) + } else { + None + }; + let blend_mode = match alpha_mode { 
wgpu::CompositeAlphaMode::PreMultiplied => { wgpu::BlendState::PREMULTIPLIED_ALPHA_BLENDING @@ -523,7 +581,8 @@ impl WgpuRenderer { data_layout: &wgpu::BindGroupLayout, topology: wgpu::PrimitiveTopology, color_targets: &[Option], - sample_count: u32| { + sample_count: u32, + module: &wgpu::ShaderModule| { let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { label: Some(&format!("{name}_layout")), bind_group_layouts: &[globals_layout, data_layout], @@ -534,13 +593,13 @@ impl WgpuRenderer { label: Some(name), layout: Some(&pipeline_layout), vertex: wgpu::VertexState { - module: &shader_module, + module, entry_point: Some(vs_entry), buffers: &[], compilation_options: wgpu::PipelineCompilationOptions::default(), }, fragment: Some(wgpu::FragmentState { - module: &shader_module, + module, entry_point: Some(fs_entry), targets: color_targets, compilation_options: wgpu::PipelineCompilationOptions::default(), @@ -574,6 +633,7 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target.clone())], 1, + &shader_module, ); let shadows = create_pipeline( @@ -585,6 +645,7 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target.clone())], 1, + &shader_module, ); let path_rasterization = create_pipeline( @@ -600,6 +661,7 @@ impl WgpuRenderer { write_mask: wgpu::ColorWrites::ALL, })], path_sample_count, + &shader_module, ); let paths_blend = wgpu::BlendState { @@ -628,6 +690,7 @@ impl WgpuRenderer { write_mask: wgpu::ColorWrites::ALL, })], 1, + &shader_module, ); let underlines = create_pipeline( @@ -639,6 +702,7 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target.clone())], 1, + &shader_module, ); let mono_sprites = create_pipeline( @@ -650,9 +714,10 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target.clone())], 1, + &shader_module, ); - let subpixel_sprites = if dual_source_blending { + let subpixel_sprites = if let Some(subpixel_module) = 
&subpixel_shader_module { let subpixel_blend = wgpu::BlendState { color: wgpu::BlendComponent { src_factor: wgpu::BlendFactor::Src1, @@ -679,6 +744,7 @@ impl WgpuRenderer { write_mask: wgpu::ColorWrites::COLOR, })], 1, + subpixel_module, )) } else { None @@ -693,6 +759,7 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target.clone())], 1, + &shader_module, ); let surfaces = create_pipeline( @@ -704,6 +771,7 @@ impl WgpuRenderer { wgpu::PrimitiveTopology::TriangleStrip, &[Some(color_target)], 1, + &shader_module, ); WgpuPipelines { @@ -776,32 +844,75 @@ impl WgpuRenderer { let height = size.height.0 as u32; if width != self.surface_config.width || height != self.surface_config.height { - self.surface_config.width = width.max(1); - self.surface_config.height = height.max(1); + let clamped_width = width.min(self.max_texture_size); + let clamped_height = height.min(self.max_texture_size); + + if clamped_width != width || clamped_height != height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + width, height, self.max_texture_size, clamped_width, clamped_height + ); + } + + // Wait for any in-flight GPU work to complete before destroying textures + if let Err(e) = self.device.poll(wgpu::PollType::Wait { + submission_index: None, + timeout: None, + }) { + warn!("Failed to poll device during resize: {e:?}"); + } + + // Destroy old textures before allocating new ones to avoid GPU memory spikes + if let Some(ref texture) = self.path_intermediate_texture { + texture.destroy(); + } + if let Some(ref texture) = self.path_msaa_texture { + texture.destroy(); + } + + self.surface_config.width = clamped_width.max(1); + self.surface_config.height = clamped_height.max(1); self.surface.configure(&self.device, &self.surface_config); - let (path_intermediate_texture, path_intermediate_view) = - Self::create_path_intermediate( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, - ); - self.path_intermediate_texture = path_intermediate_texture; - self.path_intermediate_view = path_intermediate_view; + // Invalidate intermediate textures - they will be lazily recreated + // in draw() after we confirm the surface is healthy. This avoids + // panics when the device/surface is in an invalid state during resize. 
+ self.path_intermediate_texture = None; + self.path_intermediate_view = None; + self.path_msaa_texture = None; + self.path_msaa_view = None; + } + } - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( + fn ensure_intermediate_textures(&mut self) { + if self.path_intermediate_texture.is_some() { + return; + } + + let (path_intermediate_texture, path_intermediate_view) = { + let (t, v) = Self::create_path_intermediate( &self.device, self.surface_config.format, self.surface_config.width, self.surface_config.height, - self.rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - self.path_msaa_texture = path_msaa_texture; - self.path_msaa_view = path_msaa_view; - } + ); + (Some(t), Some(v)) + }; + self.path_intermediate_texture = path_intermediate_texture; + self.path_intermediate_view = path_intermediate_view; + + let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( + &self.device, + self.surface_config.format, + self.surface_config.width, + self.surface_config.height, + self.rendering_params.path_sample_count, + ) + .map(|(t, v)| (Some(t), Some(v))) + .unwrap_or((None, None)); + self.path_msaa_texture = path_msaa_texture; + self.path_msaa_view = path_msaa_view; } pub fn update_transparency(&mut self, transparent: bool) { @@ -837,6 +948,10 @@ impl WgpuRenderer { &self.atlas } + pub fn supports_dual_source_blending(&self) -> bool { + self.dual_source_blending + } + pub fn gpu_specs(&self) -> GpuSpecs { GpuSpecs { is_software_emulated: self.adapter_info.device_type == wgpu::DeviceType::Cpu, @@ -846,7 +961,25 @@ impl WgpuRenderer { } } + pub fn max_texture_size(&self) -> u32 { + self.max_texture_size + } + pub fn draw(&mut self, scene: &Scene) { + let last_error = self.last_error.lock().unwrap().take(); + if let Some(error) = last_error { + self.failed_frame_count += 1; + log::error!( + "GPU error during frame (failure {} of 20): {error}", + self.failed_frame_count + ); + if 
self.failed_frame_count > 20 { + panic!("Too many consecutive GPU errors. Last error: {error}"); + } + } else { + self.failed_frame_count = 0; + } + self.atlas.before_frame(); let frame = match self.surface.get_current_texture() { @@ -860,6 +993,10 @@ impl WgpuRenderer { return; } }; + + // Now that we know the surface is healthy, ensure intermediate textures exist + self.ensure_intermediate_textures(); + let frame_view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); @@ -1020,7 +1157,7 @@ impl WgpuRenderer { if overflow { drop(encoder); - if self.instance_buffer_capacity >= 256 * 1024 * 1024 { + if self.instance_buffer_capacity >= self.max_buffer_size { log::error!( "instance buffer size grew too large: {}", self.instance_buffer_capacity @@ -1249,11 +1386,15 @@ impl WgpuRenderer { vec![PathSprite { bounds }] }; + let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + return true; + }; + let sprite_data = unsafe { Self::instance_bytes(&sprites) }; self.draw_instances_with_texture( sprite_data, sprites.len() as u32, - &self.path_intermediate_view, + path_intermediate_view, &self.pipelines.paths, instance_offset, pass, @@ -1297,10 +1438,14 @@ impl WgpuRenderer { }], }); + let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + return true; + }; + let (target_view, resolve_target) = if let Some(ref msaa_view) = self.path_msaa_view { - (msaa_view, Some(&self.path_intermediate_view)) + (msaa_view, Some(path_intermediate_view)) } else { - (&self.path_intermediate_view, None) + (path_intermediate_view, None) }; { @@ -1329,7 +1474,7 @@ impl WgpuRenderer { } fn grow_instance_buffer(&mut self) { - let new_capacity = self.instance_buffer_capacity * 2; + let new_capacity = (self.instance_buffer_capacity * 2).min(self.max_buffer_size); log::info!("increased instance buffer size to {}", new_capacity); self.instance_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { label: Some("instance_buffer"), 
diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 177f8639ca1a5d75bd0130979f4d550e3622a1b4..6273d773d8c4651fd292555e18d2a2462e6358df 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -19,8 +19,6 @@ doctest = true [dependencies] anyhow.workspace = true async-compression.workspace = true -async-fs.workspace = true -async-tar.workspace = true bytes.workspace = true derive_more.workspace = true futures.workspace = true @@ -31,7 +29,11 @@ parking_lot.workspace = true serde.workspace = true serde_json.workspace = true serde_urlencoded.workspace = true -sha2.workspace = true -tempfile.workspace = true url.workspace = true + +[target.'cfg(not(target_family = "wasm"))'.dependencies] util.workspace = true +async-fs.workspace = true +async-tar.workspace = true +sha2.workspace = true +tempfile.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index 8fb49f218568ea36078d772a7225229f31a916c4..a59a7339db1e4449b875e2c539e98c86b4279365 100644 --- a/crates/http_client/src/async_body.rs +++ b/crates/http_client/src/async_body.rs @@ -7,6 +7,7 @@ use std::{ use bytes::Bytes; use futures::AsyncRead; use http_body::{Body, Frame}; +use serde::Serialize; /// Based on the implementation of AsyncBody in /// . @@ -88,6 +89,19 @@ impl From<&'static str> for AsyncBody { } } +/// Newtype wrapper that serializes a value as JSON into an `AsyncBody`. 
+pub struct Json(pub T); + +impl From> for AsyncBody { + fn from(json: Json) -> Self { + Self::from_bytes( + serde_json::to_vec(&json.0) + .expect("failed to serialize JSON") + .into(), + ) + } +} + impl> From> for AsyncBody { fn from(body: Option) -> Self { match body { diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 1182ef74ca3d59a2d59419e185ff5bd673c5d505..bbbe3b1a832332bd6bee693b4c0b916b4f4c182a 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -1,9 +1,11 @@ mod async_body; +#[cfg(not(target_family = "wasm"))] pub mod github; +#[cfg(not(target_family = "wasm"))] pub mod github_download; pub use anyhow::{Result, anyhow}; -pub use async_body::{AsyncBody, Inner}; +pub use async_body::{AsyncBody, Inner, Json}; use derive_more::Deref; use http::HeaderValue; pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder}; diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 9ed9a8b658cc8bbf89c9d14d131fc8faefbc80ed..3536e73a9db6247a798145f186ae20d2efe29da5 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -23,6 +23,7 @@ pub enum IconName { AiOpenAi, AiOpenAiCompat, AiOpenRouter, + AiVercel, AiVZero, AiXAi, AiZed, @@ -112,6 +113,8 @@ pub enum IconName { ExpandUp, ExpandVertical, Eye, + FastForward, + FastForwardOff, File, FileCode, FileDiff, @@ -142,6 +145,7 @@ pub enum IconName { GitBranch, GitBranchAlt, GitBranchPlus, + GitCommit, GitGraph, Github, Hash, @@ -172,7 +176,9 @@ pub enum IconName { Mic, MicMute, Minimize, + NewThread, Notepad, + OpenFolder, Option, PageDown, PageUp, @@ -187,6 +193,7 @@ pub enum IconName { Power, Public, PullRequest, + QueueMessage, Quote, Reader, RefreshTitle, @@ -220,10 +227,6 @@ pub enum IconName { Star, StarFilled, Stop, - Supermaven, - SupermavenDisabled, - SupermavenError, - SupermavenInit, SwatchBook, SweepAi, SweepAiDisabled, diff --git a/crates/language/src/buffer.rs 
b/crates/language/src/buffer.rs index 1449052983a49a539201360ec48dd37c04a4ccae..d183615317ecaa481cda45d780c64b2ddf7ec833 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4,7 +4,7 @@ use crate::{ DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture, RunnableTag, TextObject, TreeSitterOptions, diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup}, - language_settings::{LanguageSettings, language_settings}, + language_settings::{AutoIndentMode, LanguageSettings, language_settings}, outline::OutlineItem, row_chunk::RowChunks, syntax_map::{ @@ -187,7 +187,7 @@ struct BufferBranchState { /// state of a buffer. pub struct BufferSnapshot { pub text: text::BufferSnapshot, - pub syntax: SyntaxSnapshot, + pub(crate) syntax: SyntaxSnapshot, tree_sitter_data: Arc, diagnostics: TreeMap, remote_selections: TreeMap, @@ -1776,7 +1776,9 @@ impl Buffer { self.syntax_map.lock().contains_unknown_injections() } - #[cfg(any(test, feature = "test-support"))] + /// Sets the sync parse timeout for this buffer. + /// + /// Setting this to `None` disables sync parsing entirely. pub fn set_sync_parse_timeout(&mut self, timeout: Option) { self.sync_parse_timeout = timeout; } @@ -2736,17 +2738,18 @@ impl Buffer { .filter(|((_, (range, _)), _)| { let language = before_edit.language_at(range.start); let language_id = language.map(|l| l.id()); - if let Some((cached_language_id, auto_indent)) = previous_setting + if let Some((cached_language_id, apply_syntax_indent)) = previous_setting && cached_language_id == language_id { - auto_indent + apply_syntax_indent } else { // The auto-indent setting is not present in editorconfigs, hence // we can avoid passing the file here. 
- let auto_indent = + let auto_indent_mode = language_settings(language.map(|l| l.name()), None, cx).auto_indent; - previous_setting = Some((language_id, auto_indent)); - auto_indent + let apply_syntax_indent = auto_indent_mode == AutoIndentMode::SyntaxAware; + previous_setting = Some((language_id, apply_syntax_indent)); + apply_syntax_indent } }) .map(|((ix, (range, _)), new_text)| { @@ -3706,6 +3709,14 @@ impl BufferSnapshot { None } + pub fn captures( + &self, + range: Range, + query: fn(&Grammar) -> Option<&tree_sitter::Query>, + ) -> SyntaxMapCaptures<'_> { + self.syntax.captures(range, &self.text, query) + } + #[ztracing::instrument(skip_all)] fn get_highlights(&self, range: Range) -> (SyntaxMapCaptures<'_>, Vec) { let captures = self.syntax.captures(range, &self.text, |grammar| { diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index fd14f42a93179ae0423f5acfa6ede3cceec94935..29b569ba1aa68fe83f3456a2eaf9911b4c83677d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -23,7 +23,7 @@ mod toolchain; pub mod buffer_tests; use crate::language_settings::SoftWrap; -pub use crate::language_settings::{EditPredictionsMode, IndentGuideSettings}; +pub use crate::language_settings::{AutoIndentMode, EditPredictionsMode, IndentGuideSettings}; use anyhow::{Context as _, Result}; use async_trait::async_trait; use collections::{HashMap, HashSet, IndexSet}; @@ -491,6 +491,7 @@ pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(None) } @@ -834,6 +835,11 @@ pub struct LanguageConfig { pub name: LanguageName, /// The name of this language for a Markdown code fence block pub code_fence_block_name: Option>, + /// Alternative language names that Jupyter kernels may report for this language. + /// Used when a kernel's `language` field differs from Zed's language name. 
+ /// For example, the Nu extension would set this to `["nushell"]`. + #[serde(default)] + pub kernel_language_names: Vec>, // The name of the grammar in a WASM bundle (experimental). pub grammar: Option>, /// The criteria for matching this language to a given file. @@ -1140,6 +1146,7 @@ impl Default for LanguageConfig { Self { name: LanguageName::new_static(""), code_fence_block_name: None, + kernel_language_names: Default::default(), grammar: None, matcher: LanguageMatcher::default(), brackets: Default::default(), @@ -2074,6 +2081,23 @@ impl Language { .unwrap_or_else(|| self.config.name.as_ref().to_lowercase().into()) } + pub fn matches_kernel_language(&self, kernel_language: &str) -> bool { + let kernel_language_lower = kernel_language.to_lowercase(); + + if self.code_fence_block_name().to_lowercase() == kernel_language_lower { + return true; + } + + if self.config.name.as_ref().to_lowercase() == kernel_language_lower { + return true; + } + + self.config + .kernel_language_names + .iter() + .any(|name| name.to_lowercase() == kernel_language_lower) + } + pub fn context_provider(&self) -> Option> { self.context_provider.clone() } @@ -2638,6 +2662,7 @@ impl LspAdapter for FakeLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(self.initialization_options.clone()) } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 1741d6163c13b5622192fca43b3204911486f25a..f2c55fd1e8a3b8bf5b6c2dd8ea24d1343385fa78 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -12,7 +12,7 @@ use itertools::{Either, Itertools}; use settings::{DocumentFoldingRanges, DocumentSymbols, IntoGpui, SemanticTokens}; pub use settings::{ - CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider, + AutoIndentMode, CompletionSettingsContent, EditPredictionPromptFormat, EditPredictionProvider, EditPredictionsMode, FormatOnSave, 
Formatter, FormatterList, InlayHintKind, LanguageSettingsContent, LspInsertMode, RewrapBehavior, ShowWhitespaceSetting, SoftWrap, WordsCompletionMode, @@ -144,8 +144,8 @@ pub struct LanguageSettings { /// Whether to use additional LSP queries to format (and amend) the code after /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, - /// Whether indentation should be adjusted based on the context whilst typing. - pub auto_indent: bool, + /// Controls automatic indentation behavior when typing. + pub auto_indent: AutoIndentMode, /// Whether indentation of pasted content should be adjusted based on the context. pub auto_indent_on_paste: bool, /// Controls how the editor handles the autoclosed characters. @@ -229,6 +229,22 @@ pub struct IndentGuideSettings { pub background_coloring: settings::IndentGuideBackgroundColoring, } +impl IndentGuideSettings { + /// Returns the clamped line width in pixels for an indent guide based on + /// whether it is active, or `None` when line coloring is disabled. + pub fn visible_line_width(&self, active: bool) -> Option { + if self.coloring == settings::IndentGuideColoring::Disabled { + return None; + } + let width = if active { + self.active_line_width + } else { + self.line_width + }; + Some(width.clamp(1, 10)) + } +} + #[derive(Debug, Clone, PartialEq)] pub struct LanguageTaskSettings { /// Extra task variables to set for a particular language. @@ -380,8 +396,7 @@ impl InlayHintSettings { } } -/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) -/// or [Supermaven](https://supermaven.com). +/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot). #[derive(Clone, Debug, Default)] pub struct EditPredictionSettings { /// The provider that supplies edit predictions. 
diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 132f971675ede12bb8ef5f941b57415f22d7ba88..89c44513067f6d2309d68a9f38984988358d8877 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -496,7 +496,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { }; Some(Anchor::new( timestamp, - anchor.offset as usize, + anchor.offset as u32, bias, buffer_id, )) diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index bd24424679f3e6cb02303c91e0d86db335cd0a26..c5931c474d2962fc7ceb66954f2f00d3bf14b4f8 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -13,7 +13,7 @@ use std::{ collections::BinaryHeap, fmt, iter, ops::{ControlFlow, Deref, DerefMut, Range}, - sync::Arc, + sync::{Arc, LazyLock}, time::{Duration, Instant}, }; use streaming_iterator::StreamingIterator; @@ -40,6 +40,27 @@ pub struct SyntaxSnapshot { update_count: usize, } +// Dropping deep treesitter Trees can be quite slow due to deallocating lots of memory. +// To avoid blocking the main thread, we offload the drop operation to a background thread. +impl Drop for SyntaxSnapshot { + fn drop(&mut self) { + static DROP_TX: LazyLock>> = + LazyLock::new(|| { + let (tx, rx) = std::sync::mpsc::channel(); + std::thread::Builder::new() + .name("SyntaxSnapshot::drop".into()) + .spawn(move || while let Ok(_) = rx.recv() {}) + .expect("failed to spawn drop thread"); + tx + }); + // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`. 
+ let _ = DROP_TX.send(std::mem::replace( + &mut self.layers, + SumTree::from_summary(Default::default()), + )); + } +} + #[derive(Default)] pub struct SyntaxMapCaptures<'a> { layers: Vec>, diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index c2062a294d75657b1421982974019454ecba4aa3..6f5300991fd8afbfaba710ed2bde068dd4d3a969 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -309,6 +309,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn initialization_options( self: Arc, delegate: &Arc, + _: &mut AsyncApp, ) -> Result> { let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; let json_options = self diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 313a7a3b4d94726a2e6619eddd0fd14e5e4c30e4..c403774499c9dcb384e93cf19367dc28e336aa60 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -613,6 +613,10 @@ pub trait LanguageModel: Send + Sync { false } + fn supports_fast_mode(&self) -> bool { + false + } + /// Returns the list of supported effort levels that can be used when thinking. 
fn supported_effort_levels(&self) -> Vec { Vec::new() diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 18e099b4d6fc62867bf35fbd1d4573093af44744..b2af80a3c295cab1cf40a330eb8d84f94a137eb7 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; use client::Client; use cloud_api_client::ClientApiError; +use cloud_api_types::OrganizationId; use cloud_api_types::websocket_protocol::MessageToClient; use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _}; @@ -26,29 +27,46 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { - pub async fn acquire(&self, client: &Arc) -> Result { + pub async fn acquire( + &self, + client: &Arc, + organization_id: Option, + ) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) } else { - Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await + Self::fetch( + RwLockUpgradableReadGuard::upgrade(lock).await, + client, + organization_id, + ) + .await } } - pub async fn refresh(&self, client: &Arc) -> Result { - Self::fetch(self.0.write().await, client).await + pub async fn refresh( + &self, + client: &Arc, + organization_id: Option, + ) -> Result { + Self::fetch(self.0.write().await, client, organization_id).await } async fn fetch( mut lock: RwLockWriteGuard<'_, Option>, client: &Arc, + organization_id: Option, ) -> Result { let system_id = client .telemetry() .system_id() .map(|system_id| system_id.to_string()); - let result = client.cloud_client().create_llm_token(system_id).await; + let result = client + .cloud_client() + .create_llm_token(system_id, organization_id) + .await; match result { Ok(response) => { 
*lock = Some(response.token.0.clone()); diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 04a61ae79474ea525cfe522dac2ac75048e7510b..9be3002deae758ee99432842a31e3b90754ada0f 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -431,6 +431,7 @@ pub struct LanguageModelRequestTool { pub name: String, pub description: String, pub input_schema: serde_json::Value, + pub use_input_streaming: bool, } #[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)] @@ -452,6 +453,33 @@ pub struct LanguageModelRequest { pub temperature: Option, pub thinking_allowed: bool, pub thinking_effort: Option, + pub speed: Option, +} + +#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum Speed { + #[default] + Standard, + Fast, +} + +impl Speed { + pub fn toggle(self) -> Self { + match self { + Speed::Standard => Speed::Fast, + Speed::Fast => Speed::Standard, + } + } +} + +impl From for anthropic::Speed { + fn from(speed: Speed) -> Self { + match speed { + Speed::Standard => anthropic::Speed::Standard, + Speed::Fast => anthropic::Speed::Fast, + } + } } #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 37d4ca5ddd4e5c1e7a0202c88c012d18b018cd4f..f22ea00c9e801e120bf057a06683487bc4deb22a 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -25,6 +25,7 @@ use crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider; +use crate::provider::vercel_ai_gateway::VercelAiGatewayLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider; pub use 
crate::settings::*; @@ -208,6 +209,13 @@ fn register_language_model_providers( Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)), cx, ); + registry.register_provider( + Arc::new(VercelAiGatewayLanguageModelProvider::new( + client.http_client(), + cx, + )), + cx, + ); registry.register_provider( Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)), cx, diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 6e63a5f5745afce2a21f19002706c628360d7792..27f43e37f5be343c3f80201c013e96d858bb00de 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -12,4 +12,5 @@ pub mod open_ai_compatible; pub mod open_router; mod util; pub mod vercel; +pub mod vercel_ai_gateway; pub mod x_ai; diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index c1de89e4f8505433972d4c5673b130a1e4d0e72e..d3bd129248406211e43e69fc5880310a9dedbc97 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -370,6 +370,7 @@ pub fn into_anthropic_count_tokens_request( name: tool.name, description: tool.description, input_schema: tool.input_schema, + eager_input_streaming: tool.use_input_streaming, }) .collect(), tool_choice: request.tool_choice.map(|choice| match choice { @@ -713,6 +714,7 @@ pub fn into_anthropic( name: tool.name, description: tool.description, input_schema: tool.input_schema, + eager_input_streaming: tool.use_input_streaming, }) .collect(), tool_choice: request.tool_choice.map(|choice| match choice { @@ -723,6 +725,7 @@ pub fn into_anthropic( metadata: None, output_config: None, stop_sequences: Vec::new(), + speed: request.speed.map(From::from), temperature: request.temperature.or(Some(default_temperature)), top_k: None, top_p: None, @@ -1103,6 +1106,7 @@ mod tests { tool_choice: None, thinking_allowed: true, thinking_effort: None, + speed: None, }; 
let anthropic_request = into_anthropic( @@ -1165,6 +1169,7 @@ mod tests { tools: vec![], tool_choice: None, thinking_allowed: true, + speed: None, }; request.messages.push(LanguageModelRequestMessage { role: Role::Assistant, diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index f522b84f10d48dd438d938e820f3b99bc5d186b7..edbccef31d67998596605bf629e0da7bf8561c5b 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -684,6 +684,10 @@ impl LanguageModel for BedrockModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn telemetry_id(&self) -> String { format!("bedrock/{}", self.model.id()) } @@ -1237,8 +1241,25 @@ pub fn map_to_language_model_completion_events( .get_mut(&cb_delta.content_block_index) { tool_use.input_json.push_str(tool_output.input()); + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&tool_use.input_json), + ) { + Some(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: tool_use.id.clone().into(), + name: tool_use.name.clone().into(), + is_input_complete: false, + raw_input: tool_use.input_json.clone(), + input, + thought_signature: None, + }, + ))) + } else { + None + } + } else { + None } - None } Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking { ReasoningContentBlockDelta::Text(thoughts) => { diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index f822b89916a60c32b5f076580f960d47c6a1463c..d8ffdf8762e2360231deaf835b63f7e4f065af1a 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -3,7 +3,7 @@ use anthropic::AnthropicModelMode; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; use client::{Client, UserStore, zed_urls}; -use cloud_api_types::Plan; +use cloud_api_types::{OrganizationId, Plan}; use 
cloud_llm_client::{ CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus, @@ -43,7 +43,6 @@ use std::task::Poll; use std::time::Duration; use thiserror::Error; use ui::{TintColor, prelude::*}; -use util::{ResultExt as _, maybe}; use crate::provider::anthropic::{ AnthropicEventMapper, count_anthropic_tokens_with_tiktoken, into_anthropic, @@ -97,7 +96,7 @@ pub struct State { default_model: Option>, default_fast_model: Option>, recommended_models: Vec>, - _fetch_models_task: Task<()>, + _user_store_subscription: Subscription, _settings_subscription: Subscription, _llm_token_subscription: Subscription, } @@ -110,34 +109,41 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let mut current_user = user_store.read(cx).watch_current_user(); Self { client: client.clone(), llm_api_token: LlmApiToken::default(), - user_store, + user_store: user_store.clone(), status, models: Vec::new(), default_model: None, default_fast_model: None, recommended_models: Vec::new(), - _fetch_models_task: cx.spawn(async move |this, cx| { - maybe!(async move { - let (client, llm_api_token) = this - .read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?; + _user_store_subscription: cx.subscribe( + &user_store, + move |this, _user_store, event, cx| match event { + client::user::Event::PrivateUserInfoUpdated => { + let status = *client.status().borrow(); + if status.is_signed_out() { + return; + } - while current_user.borrow().is_none() { - current_user.next().await; + let client = this.client.clone(); + let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { + let response = + Self::fetch_models(client, llm_api_token, 
organization_id).await?; + this.update(cx, |this, cx| this.update_models(response, cx)) + }) + .detach_and_log_err(cx); } - - let response = - Self::fetch_models(client.clone(), llm_api_token.clone()).await?; - this.update(cx, |this, cx| this.update_models(response, cx))?; - anyhow::Ok(()) - }) - .await - .context("failed to fetch Zed models") - .log_err(); - }), + _ => {} + }, + ), _settings_subscription: cx.observe_global::(|_, cx| { cx.notify(); }), @@ -146,9 +152,17 @@ impl State { move |this, _listener, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); cx.spawn(async move |this, cx| { - llm_api_token.refresh(&client).await?; - let response = Self::fetch_models(client, llm_api_token).await?; + llm_api_token + .refresh(&client, organization_id.clone()) + .await?; + let response = + Self::fetch_models(client, llm_api_token, organization_id).await?; this.update(cx, |this, cx| { this.update_models(response, cx); }) @@ -209,9 +223,10 @@ impl State { async fn fetch_models( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, ) -> Result { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = llm_api_token.acquire(&client, organization_id).await?; let request = http_client::Request::builder() .method(Method::GET) @@ -273,11 +288,13 @@ impl CloudLanguageModelProvider { &self, model: Arc, llm_api_token: LlmApiToken, + user_store: Entity, ) -> Arc { Arc::new(CloudLanguageModel { id: LanguageModelId(SharedString::from(model.id.0.clone())), model, llm_api_token, + user_store, client: self.client.clone(), request_limiter: RateLimiter::new(4), }) @@ -306,36 +323,46 @@ impl LanguageModelProvider for CloudLanguageModelProvider { } fn default_model(&self, cx: &App) -> Option> { - let default_model = self.state.read(cx).default_model.clone()?; - let 
llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_model, llm_api_token)) + let state = self.state.read(cx); + let default_model = state.default_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_model, llm_api_token, user_store)) } fn default_fast_model(&self, cx: &App) -> Option> { - let default_fast_model = self.state.read(cx).default_fast_model.clone()?; - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_fast_model, llm_api_token)) + let state = self.state.read(cx); + let default_fast_model = state.default_fast_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_fast_model, llm_api_token, user_store)) } fn recommended_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .recommended_models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } fn provided_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } @@ -367,6 +394,7 @@ pub struct CloudLanguageModel { id: LanguageModelId, model: 
Arc, llm_api_token: LlmApiToken, + user_store: Entity, client: Arc, request_limiter: RateLimiter, } @@ -380,12 +408,15 @@ impl CloudLanguageModel { async fn perform_llm_completion( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, app_version: Option, body: CompletionBody, ) -> Result { let http_client = &client.http_client(); - let mut token = llm_api_token.acquire(&client).await?; + let mut token = llm_api_token + .acquire(&client, organization_id.clone()) + .await?; let mut refreshed_token = false; loop { @@ -416,7 +447,9 @@ impl CloudLanguageModel { } if !refreshed_token && response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = llm_api_token + .refresh(&client, organization_id.clone()) + .await?; refreshed_token = true; continue; } @@ -571,6 +604,10 @@ impl LanguageModel for CloudLanguageModel { self.model.supports_thinking } + fn supports_fast_mode(&self) -> bool { + self.model.supports_fast_mode + } + fn supported_effort_levels(&self) -> Vec { self.model .supported_effort_levels @@ -666,12 +703,17 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::Google => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); let model_id = self.model.id.to_string(); let generate_content_request = into_google(request, model_id.clone(), GoogleModelMode::Default); async move { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = llm_api_token.acquire(&client, organization_id).await?; let request_body = CountTokensBody { provider: cloud_llm_client::LanguageModelProvider::Google, @@ -732,6 +774,13 @@ impl LanguageModel for CloudLanguageModel { let prompt_id = request.prompt_id.clone(); let intent = request.intent; let app_version = Some(cx.update(|cx| AppVersion::global(cx))); + let user_store = 
self.user_store.clone(); + let organization_id = cx.update(|cx| { + user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()) + }); let thinking_allowed = request.thinking_allowed; let enable_thinking = thinking_allowed && self.model.supports_thinking; let provider_name = provider_name(&self.model.provider); @@ -763,6 +812,7 @@ impl LanguageModel for CloudLanguageModel { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -770,6 +820,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -799,6 +850,7 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::OpenAi => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let effort = request .thinking_effort .as_ref() @@ -824,6 +876,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -857,6 +910,7 @@ impl LanguageModel for CloudLanguageModel { None, ); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -864,6 +918,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -898,6 +953,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, diff --git 
a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 55ca0e526243dbbcb9504ea3948b192d79a02da1..599dd8ac51fd6591987d4ee564b854fcf018d88f 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -246,6 +246,10 @@ impl LanguageModel for CopilotChatLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_images(&self) -> bool { self.model.supports_vision() } @@ -455,6 +459,23 @@ pub fn map_to_language_model_completion_events( entry.thought_signature = Some(thought_signature); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } } if let Some(usage) = event.usage { @@ -727,7 +748,7 @@ impl CopilotResponsesEventMapper { } copilot_responses::StreamEvent::GenericError { error } => vec![Err( - LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))), + LanguageModelCompletionError::Other(anyhow!(error.message)), )], copilot_responses::StreamEvent::Created { .. 
} @@ -930,6 +951,7 @@ fn into_copilot_responses( temperature, thinking_allowed: _, thinking_effort: _, + speed: _, } = request; let mut input_items: Vec = Vec::new(); diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 2a9f7322b1fb5d3d1e6713c5a084b83dc2b01ce2..0bf86ef15c91b16dbc496ff732b087fedd0da0a9 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -246,6 +246,10 @@ impl LanguageModel for DeepSeekLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { true } @@ -469,6 +473,23 @@ impl DeepSeekEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 3cc583ddde1cb03a4fd312b36f4358c0fbf3b4c1..338931cf7ca902225e10a7d09c9e7528128f1491 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -280,6 +280,10 @@ impl LanguageModel for MistralLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { self.model.supports_tools() } @@ -508,6 +512,13 @@ pub fn into_mistral( model: model.id().to_string(), messages, stream, + stream_options: if stream { + Some(mistral::StreamOptions { + stream_tool_calls: Some(true), + }) + } else { + 
None + }, max_tokens: max_output_tokens, temperature: request.temperature, response_format: None, @@ -616,12 +627,16 @@ impl MistralEventMapper { for tool_call in tool_calls { let entry = self.tool_calls_by_index.entry(tool_call.index).or_default(); - if let Some(tool_id) = tool_call.id.clone() { + if let Some(tool_id) = tool_call.id.clone() + && !tool_id.is_empty() + { entry.id = tool_id; } if let Some(function) = tool_call.function.as_ref() { - if let Some(name) = function.name.clone() { + if let Some(name) = function.name.clone() + && !name.is_empty() + { entry.name = name; } @@ -629,6 +644,23 @@ impl MistralEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } @@ -883,6 +915,7 @@ mod tests { stop: vec![], thinking_allowed: true, thinking_effort: None, + speed: Default::default(), }; let (mistral_request, affinity) = @@ -919,6 +952,7 @@ mod tests { stop: vec![], thinking_allowed: true, thinking_effort: None, + speed: None, }; let (mistral_request, _) = into_mistral(request, mistral::Model::Pixtral12BLatest, None); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 1de907004b897337ef3a99102ca34ea3f388ee87..f807a0dcb852e0ed3eaf7aec0860faed5834b2f4 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -309,6 +309,7 @@ impl LanguageModel for OpenAiLanguageModel { | Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex + | Model::FivePointThreeCodex | Model::O1 | Model::O3 => true, 
Model::ThreePointFiveTurbo @@ -327,6 +328,10 @@ impl LanguageModel for OpenAiLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { self.model.reasoning_effort().is_some() } @@ -554,6 +559,7 @@ pub fn into_open_ai_response( temperature, thinking_allowed: _, thinking_effort: _, + speed: _, } = request; let mut input_items = Vec::new(); @@ -822,6 +828,23 @@ impl OpenAiEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } } @@ -952,6 +975,20 @@ impl OpenAiResponseEventMapper { ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. } => { if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) { entry.arguments.push_str(&delta); + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + return vec![Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: LanguageModelToolUseId::from(entry.call_id.clone()), + name: entry.name.clone(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))]; + } } Vec::new() } @@ -1032,9 +1069,9 @@ impl OpenAiResponseEventMapper { } ResponsesStreamEvent::Error { error } | ResponsesStreamEvent::GenericError { error } => { - vec![Err(LanguageModelCompletionError::Other(anyhow!(format!( - "{error:?}" - ))))] + vec![Err(LanguageModelCompletionError::Other(anyhow!( + error.message + )))] } ResponsesStreamEvent::OutputTextDone { .. } => Vec::new(), ResponsesStreamEvent::OutputItemDone { .. 
} @@ -1180,8 +1217,11 @@ pub fn count_open_ai_tokens( | Model::FiveCodex | Model::FiveMini | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), - // GPT-5.1, 5.2, and 5.2-codex don't have dedicated tiktoken support; use gpt-5 tokenizer - Model::FivePointOne | Model::FivePointTwo | Model::FivePointTwoCodex => { + // GPT-5.1, 5.2, 5.2-codex, and 5.3-codex don't have dedicated tiktoken support; use gpt-5 tokenizer + Model::FivePointOne + | Model::FivePointTwo + | Model::FivePointTwoCodex + | Model::FivePointThreeCodex => { tiktoken_rs::num_tokens_from_messages("gpt-5", &messages) } } @@ -1431,6 +1471,7 @@ mod tests { temperature: None, thinking_allowed: true, thinking_effort: None, + speed: None, }; // Validate that all models are supported by tiktoken-rs @@ -1562,12 +1603,14 @@ mod tests { name: "get_weather".into(), description: "Fetches the weather".into(), input_schema: json!({ "type": "object" }), + use_input_streaming: false, }], tool_choice: Some(LanguageModelToolChoice::Any), stop: vec!["".into()], temperature: None, thinking_allowed: false, thinking_effort: None, + speed: None, }; let response = into_open_ai_response( @@ -1662,19 +1705,30 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. + }) + )); + // Second event is the complete tool use (from FunctionCallArgumentsDone) + assert!(matches!( + mapped[1], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref id, ref name, ref raw_input, + is_input_complete: true, .. 
}) if id.to_string() == "call_123" && name.as_ref() == "get_weather" && raw_input == "{\"city\":\"Boston\"}" )); assert!(matches!( - mapped[1], + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } @@ -1870,13 +1924,27 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], - LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. }) - if raw_input == "{\"city\":\"Boston\"}" + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. + }) )); + // Second event is the complete tool use (from the Incomplete response output) assert!(matches!( mapped[1], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + ref raw_input, + is_input_complete: true, + .. + }) + if raw_input == "{\"city\":\"Boston\"}" + )); + assert!(matches!( + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::MaxTokens) )); } @@ -1968,4 +2036,80 @@ mod tests { LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } + + #[test] + fn responses_stream_emits_partial_tool_use_events() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::FunctionCall(ResponseFunctionToolCall { + id: Some("item_fn".to_string()), + status: Some("in_progress".to_string()), + name: Some("get_weather".to_string()), + call_id: Some("call_abc".to_string()), + arguments: String::new(), + }), + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "{\"city\":\"Bos".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "ton\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDone { + item_id: "item_fn".into(), + output_index: 0, 
+ arguments: "{\"city\":\"Boston\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + // Two partial events + one complete event + Stop + assert!(mapped.len() >= 3); + + // The last complete ToolUse event should have is_input_complete: true + let complete_tool_use = mapped.iter().find(|e| { + matches!( + e, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. + }) + ) + }); + assert!( + complete_tool_use.is_some(), + "should have a complete tool use event" + ); + + // All ToolUse events before the final one should have is_input_complete: false + let tool_uses: Vec<_> = mapped + .iter() + .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_))) + .collect(); + assert!( + tool_uses.len() >= 2, + "should have at least one partial and one complete event" + ); + + let last = tool_uses.last().unwrap(); + assert!(matches!( + last, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. 
+ }) + )); + } } diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index d47ea26c594ab0abb5c859ed549d43e0ed3f859b..b478bc843c05e01d428561d9c255ef0d2ca97148 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -319,6 +319,10 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_split_token_display(&self) -> bool { true } diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index a044c7c25d7858f69dc8c4ac9fa0c8bda73f6e91..e0e56bc1beadd8309a4c1b3c7626efa99c1c6473 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,4 +1,4 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::HashMap; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; @@ -314,6 +314,10 @@ impl LanguageModel for OpenRouterLanguageModel { self.model.supports_tool_calls() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { matches!(self.model.mode, OpenRouterModelMode::Thinking { .. 
}) } @@ -591,14 +595,21 @@ impl OpenRouterEventMapper { &mut self, event: ResponseStreamEvent, ) -> Vec> { + let mut events = Vec::new(); + + if let Some(usage) = event.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.prompt_tokens, + output_tokens: usage.completion_tokens, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + let Some(choice) = event.choices.first() else { - return vec![Err(LanguageModelCompletionError::from(anyhow!( - "Response contained no choices" - )))]; + return events; }; - let mut events = Vec::new(); - if let Some(details) = choice.delta.reasoning_details.clone() { // Emit reasoning_details immediately events.push(Ok(LanguageModelCompletionEvent::ReasoningDetails( @@ -643,16 +654,24 @@ impl OpenRouterEventMapper { entry.thought_signature = Some(signature); } } - } - } - if let Some(usage) = event.usage { - events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { - input_tokens: usage.prompt_tokens, - output_tokens: usage.completion_tokens, - cache_creation_input_tokens: 0, - cache_read_input_tokens: 0, - }))); + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } + } } match choice.finish_reason.as_deref() { @@ -891,7 +910,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -916,7 +935,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), 
created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -942,7 +961,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -969,7 +988,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -1055,6 +1074,32 @@ mod tests { ); } + #[gpui::test] + async fn test_usage_only_chunk_with_empty_choices_does_not_error() { + let mut mapper = OpenRouterEventMapper::new(); + + let events = mapper.map_event(ResponseStreamEvent { + id: Some("response_123".into()), + created: 1234567890, + model: "google/gemini-3-flash-preview".into(), + choices: Vec::new(), + usage: Some(open_router::Usage { + prompt_tokens: 12, + completion_tokens: 7, + total_tokens: 19, + }), + }); + + assert_eq!(events.len(), 1); + match events.into_iter().next().unwrap() { + Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => { + assert_eq!(usage.input_tokens, 12); + assert_eq!(usage.output_tokens, 7); + } + other => panic!("Expected usage update event, got: {other:?}"), + } + } + #[gpui::test] async fn test_agent_prevents_empty_reasoning_details_overwrite() { // This test verifies that the agent layer prevents empty reasoning_details diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 3b324e46927f5864d83a5e4b74c46f5e39e8ab3a..b71da5b7db05710ee30115ab54379c9ee4e4c750 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -248,6 +248,10 @@ impl 
LanguageModel for VercelLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs new file mode 100644 index 0000000000000000000000000000000000000000..78f900de0c94fd3bbbff3962e92d1a8cb9f3e118 --- /dev/null +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -0,0 +1,710 @@ +use anyhow::Result; +use collections::BTreeMap; +use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; +use language_model::{ + ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter, + env_var, +}; +use open_ai::ResponseStreamEvent; +use serde::Deserialize; +pub use settings::OpenAiCompatibleModelCapabilities as ModelCapabilities; +pub use settings::VercelAiGatewayAvailableModel as AvailableModel; +use settings::{Settings, SettingsStore}; +use std::sync::{Arc, LazyLock}; +use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; +use ui_input::InputField; +use util::ResultExt; + +const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel_ai_gateway"); +const PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Vercel AI Gateway"); + +const API_URL: &str = "https://ai-gateway.vercel.sh/v1"; +const API_KEY_ENV_VAR_NAME: &str = "VERCEL_AI_GATEWAY_API_KEY"; +static 
API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct VercelAiGatewaySettings { + pub api_url: String, + pub available_models: Vec, +} + +pub struct VercelAiGatewayLanguageModelProvider { + http_client: Arc, + state: Entity, +} + +pub struct State { + api_key_state: ApiKeyState, + http_client: Arc, + available_models: Vec, + fetch_models_task: Option>>, +} + +impl State { + fn is_authenticated(&self) -> bool { + self.api_key_state.has_key() + } + + fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + self.api_key_state + .store(api_url, api_key, |this| &mut this.api_key_state, cx) + } + + fn authenticate(&mut self, cx: &mut Context) -> Task> { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + let task = self + .api_key_state + .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + + cx.spawn(async move |this, cx| { + let result = task.await; + this.update(cx, |this, cx| this.restart_fetch_models_task(cx)) + .ok(); + result + }) + } + + fn fetch_models( + &mut self, + cx: &mut Context, + ) -> Task> { + let http_client = self.http_client.clone(); + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + let api_key = self.api_key_state.key(&api_url); + cx.spawn(async move |this, cx| { + let models = list_models(http_client.as_ref(), &api_url, api_key.as_deref()).await?; + this.update(cx, |this, cx| { + this.available_models = models; + cx.notify(); + }) + .map_err(|e| LanguageModelCompletionError::Other(e))?; + Ok(()) + }) + } + + fn restart_fetch_models_task(&mut self, cx: &mut Context) { + if self.is_authenticated() { + let task = self.fetch_models(cx); + self.fetch_models_task.replace(task); + } else { + self.available_models = Vec::new(); + } + } +} + +impl VercelAiGatewayLanguageModelProvider { + pub fn new(http_client: Arc, cx: &mut App) -> Self { + let state = cx.new(|cx| 
{ + cx.observe_global::({ + let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone(); + move |this: &mut State, cx| { + let current_settings = VercelAiGatewayLanguageModelProvider::settings(cx); + if current_settings != &last_settings { + last_settings = current_settings.clone(); + this.authenticate(cx).detach(); + cx.notify(); + } + } + }) + .detach(); + State { + api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + http_client: http_client.clone(), + available_models: Vec::new(), + fetch_models_task: None, + } + }); + + Self { http_client, state } + } + + fn settings(cx: &App) -> &VercelAiGatewaySettings { + &crate::AllLanguageModelSettings::get_global(cx).vercel_ai_gateway + } + + fn api_url(cx: &App) -> SharedString { + let api_url = &Self::settings(cx).api_url; + if api_url.is_empty() { + API_URL.into() + } else { + SharedString::new(api_url.as_str()) + } + } + + fn default_available_model() -> AvailableModel { + AvailableModel { + name: "openai/gpt-5.3-codex".to_string(), + display_name: Some("GPT 5.3 Codex".to_string()), + max_tokens: 400_000, + max_output_tokens: Some(128_000), + max_completion_tokens: None, + capabilities: ModelCapabilities::default(), + } + } + + fn create_language_model(&self, model: AvailableModel) -> Arc { + Arc::new(VercelAiGatewayLanguageModel { + id: LanguageModelId::from(model.name.clone()), + model, + state: self.state.clone(), + http_client: self.http_client.clone(), + request_limiter: RateLimiter::new(4), + }) + } +} + +impl LanguageModelProviderState for VercelAiGatewayLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +impl LanguageModelProvider for VercelAiGatewayLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn icon(&self) -> IconOrSvg { + IconOrSvg::Icon(IconName::AiVercel) 
+ } + + fn default_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(Self::default_available_model())) + } + + fn default_fast_model(&self, _cx: &App) -> Option> { + None + } + + fn provided_models(&self, cx: &App) -> Vec> { + let mut models = BTreeMap::default(); + + let default_model = Self::default_available_model(); + models.insert(default_model.name.clone(), default_model); + + for model in self.state.read(cx).available_models.clone() { + models.insert(model.name.clone(), model); + } + + for model in &Self::settings(cx).available_models { + models.insert(model.name.clone(), model.clone()); + } + + models + .into_values() + .map(|model| self.create_language_model(model)) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view( + &self, + _target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) + } +} + +pub struct VercelAiGatewayLanguageModel { + id: LanguageModelId, + model: AvailableModel, + state: Entity, + http_client: Arc, + request_limiter: RateLimiter, +} + +impl VercelAiGatewayLanguageModel { + fn stream_open_ai( + &self, + request: open_ai::Request, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream<'static, Result>, + LanguageModelCompletionError, + >, + > { + let http_client = self.http_client.clone(); + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + (state.api_key_state.key(&api_url), api_url) + }); + + let future = self.request_limiter.stream(async 
move { + let provider = PROVIDER_NAME; + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { provider }); + }; + let request = open_ai::stream_completion( + http_client.as_ref(), + provider.0.as_str(), + &api_url, + &api_key, + request, + ); + let response = request.await.map_err(map_open_ai_error)?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } +} + +fn map_open_ai_error(error: open_ai::RequestError) -> LanguageModelCompletionError { + match error { + open_ai::RequestError::HttpResponseError { + status_code, + body, + headers, + .. + } => { + let retry_after = headers + .get(http::header::RETRY_AFTER) + .and_then(|value| value.to_str().ok()?.parse::().ok()) + .map(std::time::Duration::from_secs); + + LanguageModelCompletionError::from_http_status( + PROVIDER_NAME, + status_code, + extract_error_message(&body), + retry_after, + ) + } + open_ai::RequestError::Other(error) => LanguageModelCompletionError::Other(error), + } +} + +fn extract_error_message(body: &str) -> String { + let json = match serde_json::from_str::(body) { + Ok(json) => json, + Err(_) => return body.to_string(), + }; + + let message = json + .get("error") + .and_then(|value| { + value + .get("message") + .and_then(serde_json::Value::as_str) + .or_else(|| value.as_str()) + }) + .or_else(|| json.get("message").and_then(serde_json::Value::as_str)) + .map(ToString::to_string) + .unwrap_or_else(|| body.to_string()); + + clean_error_message(&message) +} + +fn clean_error_message(message: &str) -> String { + let lower = message.to_lowercase(); + + if lower.contains("vercel_oidc_token") && lower.contains("oidc token") { + return "Authentication failed for Vercel AI Gateway. 
Use a Vercel AI Gateway key (vck_...).\nCreate or manage keys in Vercel AI Gateway console.\nIf this persists, regenerate the key and update it in Vercel AI Gateway provider settings in Zed.".to_string(); + } + + if lower.contains("invalid api key") || lower.contains("invalid_api_key") { + return "Authentication failed for Vercel AI Gateway. Check that your Vercel AI Gateway key starts with vck_ and is active.".to_string(); + } + + message.to_string() +} + +fn has_tag(tags: &[String], expected: &str) -> bool { + tags.iter() + .any(|tag| tag.trim().eq_ignore_ascii_case(expected)) +} + +impl LanguageModel for VercelAiGatewayLanguageModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + LanguageModelName::from( + self.model + .display_name + .clone() + .unwrap_or_else(|| self.model.name.clone()), + ) + } + + fn provider_id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn provider_name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn supports_tools(&self) -> bool { + self.model.capabilities.tools + } + + fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { + LanguageModelToolSchemaFormat::JsonSchemaSubset + } + + fn supports_images(&self) -> bool { + self.model.capabilities.images + } + + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { + match choice { + LanguageModelToolChoice::Auto => self.model.capabilities.tools, + LanguageModelToolChoice::Any => self.model.capabilities.tools, + LanguageModelToolChoice::None => true, + } + } + + fn supports_streaming_tools(&self) -> bool { + true + } + + fn supports_split_token_display(&self) -> bool { + true + } + + fn telemetry_id(&self) -> String { + format!("vercel_ai_gateway/{}", self.model.name) + } + + fn max_token_count(&self) -> u64 { + self.model.max_tokens + } + + fn max_output_tokens(&self) -> Option { + self.model.max_output_tokens + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + 
cx: &App, + ) -> BoxFuture<'static, Result> { + let max_token_count = self.max_token_count(); + cx.background_spawn(async move { + let messages = crate::provider::open_ai::collect_tiktoken_messages(request); + let model = if max_token_count >= 100_000 { + "gpt-4o" + } else { + "gpt-4" + }; + tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64) + }) + .boxed() + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + let request = crate::provider::open_ai::into_open_ai( + request, + &self.model.name, + self.model.capabilities.parallel_tool_calls, + self.model.capabilities.prompt_cache_key, + self.max_output_tokens(), + None, + ); + let completions = self.stream_open_ai(request, cx); + async move { + let mapper = crate::provider::open_ai::OpenAiEventMapper::new(); + Ok(mapper.map_stream(completions.await?).boxed()) + } + .boxed() + } +} + +#[derive(Deserialize)] +struct ModelsResponse { + data: Vec, +} + +#[derive(Deserialize)] +struct ApiModel { + id: String, + name: Option, + context_window: Option, + max_tokens: Option, + #[serde(default)] + r#type: Option, + #[serde(default)] + supported_parameters: Vec, + #[serde(default)] + tags: Vec, + architecture: Option, +} + +#[derive(Deserialize)] +struct ApiModelArchitecture { + #[serde(default)] + input_modalities: Vec, +} + +async fn list_models( + client: &dyn HttpClient, + api_url: &str, + api_key: Option<&str>, +) -> Result, LanguageModelCompletionError> { + let uri = format!("{api_url}/models?include_mappings=true"); + let mut request_builder = HttpRequest::builder() + .method(Method::GET) + .uri(uri) + .header("Accept", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder + 
.body(AsyncBody::default()) + .map_err(|error| LanguageModelCompletionError::BuildRequestBody { + provider: PROVIDER_NAME, + error, + })?; + let mut response = + client + .send(request) + .await + .map_err(|error| LanguageModelCompletionError::HttpSend { + provider: PROVIDER_NAME, + error, + })?; + + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(|error| LanguageModelCompletionError::ApiReadResponseError { + provider: PROVIDER_NAME, + error, + })?; + + if !response.status().is_success() { + return Err(LanguageModelCompletionError::from_http_status( + PROVIDER_NAME, + response.status(), + extract_error_message(&body), + None, + )); + } + + let response: ModelsResponse = serde_json::from_str(&body).map_err(|error| { + LanguageModelCompletionError::DeserializeResponse { + provider: PROVIDER_NAME, + error, + } + })?; + + let mut models = Vec::new(); + for model in response.data { + if let Some(model_type) = model.r#type.as_deref() + && model_type != "language" + { + continue; + } + let supports_tools = model + .supported_parameters + .iter() + .any(|parameter| parameter == "tools") + || has_tag(&model.tags, "tool-use") + || has_tag(&model.tags, "tools"); + let supports_images = model.architecture.is_some_and(|architecture| { + architecture + .input_modalities + .iter() + .any(|modality| modality == "image") + }) || has_tag(&model.tags, "vision") + || has_tag(&model.tags, "image-input"); + let parallel_tool_calls = model + .supported_parameters + .iter() + .any(|parameter| parameter == "parallel_tool_calls"); + let prompt_cache_key = model + .supported_parameters + .iter() + .any(|parameter| parameter == "prompt_cache_key" || parameter == "cache_control"); + models.push(AvailableModel { + name: model.id.clone(), + display_name: model.name.or(Some(model.id)), + max_tokens: model.context_window.or(model.max_tokens).unwrap_or(128_000), + max_output_tokens: model.max_tokens, + max_completion_tokens: None, + 
capabilities: ModelCapabilities { + tools: supports_tools, + images: supports_images, + parallel_tool_calls, + prompt_cache_key, + chat_completions: true, + }, + }); + } + + Ok(models) +} + +struct ConfigurationView { + api_key_editor: Entity, + state: Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = + cx.new(|cx| InputField::new(window, cx, "vck_000000000000000000000000000")); + + cx.observe(&state, |_, _, cx| cx.notify()).detach(); + + let load_credentials_task = Some(cx.spawn_in(window, { + let state = state.clone(); + async move |this, cx| { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { + let _ = task.await; + } + this.update(cx, |this, cx| { + this.load_credentials_task = None; + cx.notify(); + }) + .log_err(); + } + })); + + Self { + api_key_editor, + state, + load_credentials_task, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); + if api_key.is_empty() { + return; + } + + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn should_render_editor(&self, cx: &Context) -> bool { + !self.state.read(cx).is_authenticated() + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _: &mut Window, cx: &mut Context) 
-> impl IntoElement { + let env_var_set = self.state.read(cx).api_key_state.is_from_env_var(); + let configured_card_label = if env_var_set { + format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable") + } else { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + if api_url == API_URL { + "API key configured".to_string() + } else { + format!("API key configured for {}", api_url) + } + }; + + if self.load_credentials_task.is_some() { + div().child(Label::new("Loading credentials...")).into_any() + } else if self.should_render_editor(cx) { + v_flex() + .size_full() + .on_action(cx.listener(Self::save_api_key)) + .child(Label::new( + "To use Zed's agent with Vercel AI Gateway, you need to add an API key. Follow these steps:", + )) + .child( + List::new() + .child( + ListBulletItem::new("") + .child(Label::new("Create an API key in")) + .child(ButtonLink::new( + "Vercel AI Gateway's console", + "https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway", + )), + ) + .child(ListBulletItem::new( + "Paste your API key below and hit enter to start using the assistant", + )), + ) + .child(self.api_key_editor.clone()) + .child( + Label::new(format!( + "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.", + )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + ConfiguredApiCard::new(configured_card_label) + .disabled(env_var_set) + .when(env_var_set, |this| { + this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))) + .into_any_element() + } + } +} diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 06564224dea9621d594e5cf3f4a84093f1620446..f1f8bb658f04a91341951d1602af04f858af7bd3 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ 
b/crates/language_models/src/provider/x_ai.rs @@ -257,6 +257,10 @@ impl LanguageModel for XAiLanguageModel { self.model.supports_images() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto @@ -265,8 +269,7 @@ impl LanguageModel for XAiLanguageModel { } } fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { - let model_id = self.model.id().trim().to_lowercase(); - if model_id.eq(x_ai::Model::Grok4.id()) || model_id.eq(x_ai::Model::GrokCodeFast1.id()) { + if self.model.requires_json_schema_subset() { LanguageModelToolSchemaFormat::JsonSchemaSubset } else { LanguageModelToolSchemaFormat::JsonSchema diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 512ea05b0c6cfb7d91b39beb8aafb0de7916a78e..7466a337f636abcd8ad70343dfd64a825a7fb6a7 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -8,7 +8,7 @@ use crate::provider::{ deepseek::DeepSeekSettings, google::GoogleSettings, lmstudio::LmStudioSettings, mistral::MistralSettings, ollama::OllamaSettings, open_ai::OpenAiSettings, open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings, - vercel::VercelSettings, x_ai::XAiSettings, + vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, x_ai::XAiSettings, }; #[derive(Debug, RegisterSetting)] @@ -24,6 +24,7 @@ pub struct AllLanguageModelSettings { pub openai: OpenAiSettings, pub openai_compatible: HashMap, OpenAiCompatibleSettings>, pub vercel: VercelSettings, + pub vercel_ai_gateway: VercelAiGatewaySettings, pub x_ai: XAiSettings, pub zed_dot_dev: ZedDotDevSettings, } @@ -44,6 +45,7 @@ impl settings::Settings for AllLanguageModelSettings { let openai = language_models.openai.unwrap(); let openai_compatible = language_models.openai_compatible.unwrap(); let vercel = language_models.vercel.unwrap(); + let 
vercel_ai_gateway = language_models.vercel_ai_gateway.unwrap(); let x_ai = language_models.x_ai.unwrap(); let zed_dot_dev = language_models.zed_dot_dev.unwrap(); Self { @@ -107,6 +109,10 @@ impl settings::Settings for AllLanguageModelSettings { api_url: vercel.api_url.unwrap(), available_models: vercel.available_models.unwrap_or_default(), }, + vercel_ai_gateway: VercelAiGatewaySettings { + api_url: vercel_ai_gateway.api_url.unwrap(), + available_models: vercel_ai_gateway.available_models.unwrap_or_default(), + }, x_ai: XAiSettings { api_url: x_ai.api_url.unwrap(), available_models: x_ai.available_models.unwrap_or_default(), diff --git a/crates/language_selector/Cargo.toml b/crates/language_selector/Cargo.toml index 115509f512ae40f8f5ec9f8f588814cc4a3fa6af..1c236bba260e5825156005663b121dcd9ebc7b26 100644 --- a/crates/language_selector/Cargo.toml +++ b/crates/language_selector/Cargo.toml @@ -29,3 +29,4 @@ workspace.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } +serde_json.workspace = true diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 4c8de90c14c556270386acad34b47961326b3f36..17a39d4979a1321a4b0e612bff228f186098babf 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -71,11 +71,16 @@ impl LanguageSelector { window: &mut Window, cx: &mut Context, ) -> Self { + let current_language_name = buffer + .read(cx) + .language() + .map(|language| language.name().as_ref().to_string()); let delegate = LanguageSelectorDelegate::new( cx.entity().downgrade(), buffer, project, language_registry, + current_language_name, ); let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx)); @@ -109,6 +114,7 @@ pub struct LanguageSelectorDelegate { candidates: Vec, matches: Vec, selected_index: usize, + current_language_candidate_index: Option, } impl LanguageSelectorDelegate { @@ -117,6 
+123,7 @@ impl LanguageSelectorDelegate { buffer: Entity, project: Entity, language_registry: Arc, + current_language_name: Option, ) -> Self { let candidates = language_registry .language_names() @@ -132,6 +139,12 @@ impl LanguageSelectorDelegate { .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name.as_ref())) .collect::>(); + let current_language_candidate_index = current_language_name.as_ref().and_then(|name| { + candidates + .iter() + .position(|candidate| candidate.string == *name) + }); + Self { language_selector, buffer, @@ -139,7 +152,8 @@ impl LanguageSelectorDelegate { language_registry, candidates, matches: vec![], - selected_index: 0, + selected_index: current_language_candidate_index.unwrap_or(0), + current_language_candidate_index, } } @@ -239,8 +253,9 @@ impl PickerDelegate for LanguageSelectorDelegate { ) -> gpui::Task<()> { let background = cx.background_executor().clone(); let candidates = self.candidates.clone(); + let query_is_empty = query.is_empty(); cx.spawn_in(window, async move |this, cx| { - let matches = if query.is_empty() { + let matches = if query_is_empty { candidates .into_iter() .enumerate() @@ -264,12 +279,21 @@ impl PickerDelegate for LanguageSelectorDelegate { .await }; - this.update(cx, |this, cx| { + this.update_in(cx, |this, window, cx| { let delegate = &mut this.delegate; delegate.matches = matches; delegate.selected_index = delegate .selected_index .min(delegate.matches.len().saturating_sub(1)); + + if query_is_empty { + if let Some(index) = delegate + .current_language_candidate_index + .and_then(|ci| delegate.matches.iter().position(|m| m.candidate_id == ci)) + { + this.set_selected_index(index, None, false, window, cx); + } + } cx.notify(); }) .log_err(); @@ -295,3 +319,255 @@ impl PickerDelegate for LanguageSelectorDelegate { ) } } + +#[cfg(test)] +mod tests { + use super::*; + use editor::Editor; + use gpui::{TestAppContext, VisualTestContext}; + use language::{Language, LanguageConfig}; + use 
project::{Project, ProjectPath}; + use serde_json::json; + use std::sync::Arc; + use util::{path, rel_path::rel_path}; + use workspace::{AppState, MultiWorkspace, Workspace}; + + fn init_test(cx: &mut TestAppContext) -> Arc { + cx.update(|cx| { + let app_state = AppState::test(cx); + settings::init(cx); + super::init(cx); + editor::init(cx); + app_state + }) + } + + fn register_test_languages(project: &Entity, cx: &mut VisualTestContext) { + project.read_with(cx, |project, _| { + let language_registry = project.languages(); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + ))); + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "TypeScript".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["ts".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, + ))); + }); + } + + async fn open_file_editor( + workspace: &Entity, + project: &Entity, + file_path: &str, + cx: &mut VisualTestContext, + ) -> Entity { + let worktree_id = project.update(cx, |project, cx| { + project + .worktrees(cx) + .next() + .expect("project should have a worktree") + .read(cx) + .id() + }); + let project_path = ProjectPath { + worktree_id, + path: rel_path(file_path).into(), + }; + let opened_item = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path(project_path, None, true, window, cx) + }) + .await + .expect("file should open"); + + cx.update(|_, cx| { + opened_item + .act_as::(cx) + .expect("opened item should be an editor") + }) + } + + async fn open_empty_editor( + workspace: &Entity, + project: &Entity, + cx: &mut VisualTestContext, + ) -> Entity { + let create_buffer = project.update(cx, |project, cx| project.create_buffer(None, true, cx)); + let buffer = create_buffer.await.expect("empty buffer should be created"); + let editor = 
cx.new_window_entity(|window, cx| { + Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx) + }); + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_center(Box::new(editor.clone()), window, cx); + }); + // Ensure the buffer has no language after the editor is created + buffer.update(cx, |buffer, cx| { + buffer.set_language(None, cx); + }); + editor + } + + async fn set_editor_language( + project: &Entity, + editor: &Entity, + language_name: &str, + cx: &mut VisualTestContext, + ) { + let language = project + .read_with(cx, |project, _| { + project.languages().language_for_name(language_name) + }) + .await + .expect("language should exist in registry"); + editor.update(cx, move |editor, cx| { + let (_, buffer, _) = editor + .active_excerpt(cx) + .expect("editor should have an active excerpt"); + buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(language), cx); + }); + }); + } + + fn active_picker( + workspace: &Entity, + cx: &mut VisualTestContext, + ) -> Entity> { + workspace.update(cx, |workspace, cx| { + workspace + .active_modal::(cx) + .expect("language selector should be open") + .read(cx) + .picker + .clone() + }) + } + + fn open_selector( + workspace: &Entity, + cx: &mut VisualTestContext, + ) -> Entity> { + cx.dispatch_action(Toggle); + cx.run_until_parked(); + active_picker(workspace, cx) + } + + fn close_selector(workspace: &Entity, cx: &mut VisualTestContext) { + cx.dispatch_action(Toggle); + cx.run_until_parked(); + workspace.read_with(cx, |workspace, cx| { + assert!( + workspace.active_modal::(cx).is_none(), + "language selector should be closed" + ); + }); + } + + fn assert_selected_language_for_editor( + workspace: &Entity, + editor: &Entity, + expected_language_name: Option<&str>, + cx: &mut VisualTestContext, + ) { + workspace.update_in(cx, |workspace, window, cx| { + let was_activated = workspace.activate_item(editor, true, true, window, cx); + assert!( + was_activated, + "editor should be 
activated before opening the modal" + ); + }); + cx.run_until_parked(); + + let picker = open_selector(workspace, cx); + picker.read_with(cx, |picker, _| { + let selected_match = picker + .delegate + .matches + .get(picker.delegate.selected_index) + .expect("selected index should point to a match"); + let selected_candidate = picker + .delegate + .candidates + .get(selected_match.candidate_id) + .expect("selected match should map to a candidate"); + + if let Some(expected_language_name) = expected_language_name { + let current_language_candidate_index = picker + .delegate + .current_language_candidate_index + .expect("current language should map to a candidate"); + assert_eq!( + selected_match.candidate_id, + current_language_candidate_index + ); + assert_eq!(selected_candidate.string, expected_language_name); + } else { + assert!(picker.delegate.current_language_candidate_index.is_none()); + assert_eq!(picker.delegate.selected_index, 0); + } + }); + close_selector(workspace, cx); + } + + #[gpui::test] + async fn test_language_selector_selects_current_language_per_active_editor( + cx: &mut TestAppContext, + ) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/test"), + json!({ + "rust_file.rs": "fn main() {}\n", + "typescript_file.ts": "const value = 1;\n", + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = + multi_workspace.read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone()); + register_test_languages(&project, cx); + + let rust_editor = open_file_editor(&workspace, &project, "rust_file.rs", cx).await; + let typescript_editor = + open_file_editor(&workspace, &project, "typescript_file.ts", cx).await; + let empty_editor = open_empty_editor(&workspace, &project, cx).await; + + set_editor_language(&project, 
&rust_editor, "Rust", cx).await; + set_editor_language(&project, &typescript_editor, "TypeScript", cx).await; + cx.run_until_parked(); + + assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx); + assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx); + // Ensure the empty editor's buffer has no language before asserting + let (_, buffer, _) = empty_editor.read_with(cx, |editor, cx| { + editor + .active_excerpt(cx) + .expect("editor should have an active excerpt") + }); + buffer.update(cx, |buffer, cx| { + buffer.set_language(None, cx); + }); + assert_selected_language_for_editor(&workspace, &empty_editor, None, cx); + } +} diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index e3e6dccf9b7eedfa747e36010b9f9353b40d0275..fb92e21ab33eb3b6a3cd498a6ffbdd764947ea9e 100644 --- a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -8,6 +8,7 @@ use gpui::{ MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled, Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list, }; +use language::ToOffset; use menu::{SelectNext, SelectPrevious}; use std::{mem, ops::Range}; use theme::ActiveTheme; @@ -37,6 +38,8 @@ actions!( ToggleTextHighlights, /// Toggles showing semantic token highlights. ToggleSemanticTokens, + /// Toggles showing syntax token highlights. 
+ ToggleSyntaxTokens, ] ); @@ -61,9 +64,14 @@ pub fn init(cx: &mut App) { #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum HighlightCategory { Text(HighlightKey), + SyntaxToken { + capture_name: SharedString, + theme_key: Option, + }, SemanticToken { token_type: Option, token_modifiers: Option, + theme_key: Option, }, } @@ -71,22 +79,34 @@ impl HighlightCategory { fn label(&self) -> SharedString { match self { HighlightCategory::Text(key) => format!("text: {key:?}").into(), + HighlightCategory::SyntaxToken { + capture_name, + theme_key: Some(theme_key), + } => format!("syntax: {capture_name} \u{2192} {theme_key}").into(), + HighlightCategory::SyntaxToken { + capture_name, + theme_key: None, + } => format!("syntax: {capture_name}").into(), HighlightCategory::SemanticToken { - token_type: Some(token_type), - token_modifiers: Some(modifiers), - } => format!("semantic token: {token_type} [{modifiers}]").into(), - HighlightCategory::SemanticToken { - token_type: Some(token_type), - token_modifiers: None, - } => format!("semantic token: {token_type}").into(), - HighlightCategory::SemanticToken { - token_type: None, - token_modifiers: Some(modifiers), - } => format!("semantic token [{modifiers}]").into(), - HighlightCategory::SemanticToken { - token_type: None, - token_modifiers: None, - } => "semantic token".into(), + token_type, + token_modifiers, + theme_key, + } => { + let label = match (token_type, token_modifiers) { + (Some(token_type), Some(modifiers)) => { + format!("semantic token: {token_type} [{modifiers}]") + } + (Some(token_type), None) => format!("semantic token: {token_type}"), + (None, Some(modifiers)) => format!("semantic token [{modifiers}]"), + (None, None) => "semantic token".to_string(), + }; + + if let Some(theme_key) = theme_key { + format!("{label} \u{2192} {theme_key}").into() + } else { + label.into() + } + } } } } @@ -124,6 +144,7 @@ pub struct HighlightsTreeView { display_items: Vec, is_singleton: bool, show_text_highlights: 
bool, + show_syntax_tokens: bool, show_semantic_tokens: bool, skip_next_scroll: bool, } @@ -157,6 +178,7 @@ impl HighlightsTreeView { display_items: Vec::new(), is_singleton: true, show_text_highlights: true, + show_syntax_tokens: true, show_semantic_tokens: true, skip_next_scroll: false, }; @@ -280,6 +302,7 @@ impl HighlightsTreeView { let mut entries = Vec::new(); + let semantic_theme = cx.theme().syntax().clone(); display_map.update(cx, |display_map, cx| { for (key, text_highlights) in display_map.all_text_highlights() { for range in &text_highlights.1 { @@ -323,6 +346,32 @@ impl HighlightsTreeView { ) else { continue; }; + + let theme_key = + stylizer + .rules_for_token(token.token_type) + .and_then(|rules| { + rules + .iter() + .filter(|rule| { + rule.token_modifiers.iter().all(|modifier| { + stylizer + .has_modifier(token.token_modifiers, modifier) + }) + }) + .fold(None, |theme_key, rule| { + rule.style + .iter() + .find(|style_name| { + semantic_theme.get_opt(style_name).is_some() + }) + .map(|style_name| { + SharedString::from(style_name.clone()) + }) + .or(theme_key) + }) + }); + entries.push(HighlightEntry { excerpt_id, range, @@ -333,6 +382,7 @@ impl HighlightsTreeView { token_modifiers: stylizer .token_modifiers(token.token_modifiers) .map(SharedString::from), + theme_key, }, sort_key, }); @@ -341,6 +391,64 @@ impl HighlightsTreeView { }); }); + let syntax_theme = cx.theme().syntax().clone(); + for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() { + let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot); + let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot); + let range = start_offset..end_offset; + + let captures = buffer_snapshot.captures(range, |grammar| { + grammar.highlights_config.as_ref().map(|c| &c.query) + }); + let grammars: Vec<_> = captures.grammars().to_vec(); + let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect(); + + for capture in captures 
{ + let highlight_id = highlight_maps[capture.grammar_index].get(capture.index); + let Some(style) = highlight_id.style(&syntax_theme) else { + continue; + }; + + let theme_key = highlight_id + .name(&syntax_theme) + .map(|theme_key| SharedString::from(theme_key.to_string())); + + let capture_name = grammars[capture.grammar_index] + .highlights_config + .as_ref() + .and_then(|config| config.query.capture_names().get(capture.index as usize)) + .map(|capture_name| SharedString::from((*capture_name).to_string())) + .unwrap_or_else(|| SharedString::from("unknown")); + + let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte()); + let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte()); + + let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor); + let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor); + + let (start, end) = match (start, end) { + (Some(s), Some(e)) => (s, e), + _ => continue, + }; + + let range = start..end; + let (range_display, sort_key) = + format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton); + + entries.push(HighlightEntry { + excerpt_id, + range, + range_display, + style, + category: HighlightCategory::SyntaxToken { + capture_name, + theme_key, + }, + sort_key, + }); + } + } + entries.sort_by(|a, b| { a.sort_key .cmp(&b.sort_key) @@ -387,6 +495,7 @@ impl HighlightsTreeView { fn should_show_entry(&self, entry: &HighlightEntry) -> bool { match entry.category { HighlightCategory::Text(_) => self.show_text_highlights, + HighlightCategory::SyntaxToken { .. } => self.show_syntax_tokens, HighlightCategory::SemanticToken { .. 
} => self.show_semantic_tokens, } } @@ -695,14 +804,14 @@ impl Render for HighlightsTreeView { this.child(Label::new("All highlights are filtered out")) .child( Label::new( - "Enable text or semantic highlights in the toolbar", + "Enable text, syntax, or semantic highlights in the toolbar", ) .size(LabelSize::Small), ) } else { this.child(Label::new("No highlights found")).child( Label::new( - "The editor has no text or semantic token highlights", + "The editor has no text, syntax, or semantic token highlights", ) .size(LabelSize::Small), ) @@ -762,6 +871,7 @@ impl Item for HighlightsTreeView { Task::ready(Some(cx.new(|cx| { let mut clone = Self::new(self.workspace_handle.clone(), None, window, cx); clone.show_text_highlights = self.show_text_highlights; + clone.show_syntax_tokens = self.show_syntax_tokens; clone.show_semantic_tokens = self.show_semantic_tokens; clone.skip_next_scroll = false; if let Some(editor) = &self.editor { @@ -810,14 +920,18 @@ impl HighlightsTreeToolbarItemView { } fn render_settings_button(&self, cx: &Context) -> PopoverMenu { - let (show_text, show_semantic) = self + let (show_text, show_syntax, show_semantic) = self .tree_view .as_ref() .map(|view| { let v = view.read(cx); - (v.show_text_highlights, v.show_semantic_tokens) + ( + v.show_text_highlights, + v.show_syntax_tokens, + v.show_semantic_tokens, + ) }) - .unwrap_or((true, true)); + .unwrap_or((true, true, true)); let tree_view = self.tree_view.as_ref().map(|v| v.downgrade()); @@ -833,6 +947,7 @@ impl HighlightsTreeToolbarItemView { .with_handle(self.toggle_settings_handle.clone()) .menu(move |window, cx| { let tree_view_for_text = tree_view.clone(); + let tree_view_for_syntax = tree_view.clone(); let tree_view_for_semantic = tree_view.clone(); let menu = ContextMenu::build(window, cx, move |menu, _, _| { @@ -860,6 +975,30 @@ impl HighlightsTreeToolbarItemView { } }, ) + .toggleable_entry( + "Syntax Tokens", + show_syntax, + IconPosition::Start, + 
Some(ToggleSyntaxTokens.boxed_clone()), + { + let tree_view = tree_view_for_syntax.clone(); + move |_, cx| { + if let Some(view) = tree_view.as_ref() { + view.update(cx, |view, cx| { + view.show_syntax_tokens = !view.show_syntax_tokens; + let snapshot = view.editor.as_ref().map(|s| { + s.editor.read(cx).buffer().read(cx).snapshot(cx) + }); + if let Some(snapshot) = snapshot { + view.rebuild_display_items(&snapshot, cx); + } + cx.notify(); + }) + .ok(); + } + } + }, + ) .toggleable_entry( "Semantic Tokens", show_semantic, diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 54aae61a696672b5767e05f3cc85aba57d4d3e41..781c18eb84cbc9ad7a1b666c089a7b65460c327b 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -333,13 +333,7 @@ impl LanguageServerState { }) .unwrap_or((None, None, None)); - let truncated_message = message.as_ref().and_then(|message| { - message - .lines() - .filter(|line| !line.trim().is_empty()) - .map(SharedString::new) - .next() - }); + let server_message = message.clone(); let submenu_server_name = server_info.name.clone(); let submenu_server_info = server_info.clone(); @@ -549,9 +543,9 @@ impl LanguageServerState { submenu = submenu.separator().custom_row({ let binary_path = binary_path.clone(); let server_version = server_version.clone(); - let truncated_message = truncated_message.clone(); + let server_message = server_message.clone(); let process_memory_cache = process_memory_cache.clone(); - move |_, _| { + move |_, cx| { let memory_usage = process_id.map(|pid| { process_memory_cache.borrow_mut().get_memory_usage(pid) }); @@ -567,63 +561,63 @@ impl LanguageServerState { } }); - let metadata_label = - match (&server_version, &memory_label, &truncated_message) { - (None, None, None) => None, - (Some(version), None, None) => { - Some(format!("v{}", version.as_ref())) - } - (None, Some(memory), None) => Some(memory.clone()), - (Some(version), 
Some(memory), None) => { - Some(format!("v{} • {}", version.as_ref(), memory)) - } - (None, None, Some(message)) => Some(message.to_string()), - (Some(version), None, Some(message)) => Some(format!( - "v{}\n\n{}", - version.as_ref(), - message.as_ref() - )), - (None, Some(memory), Some(message)) => { - Some(format!("{}\n\n{}", memory, message.as_ref())) - } - (Some(version), Some(memory), Some(message)) => { - Some(format!( - "v{} • {}\n\n{}", - version.as_ref(), - memory, - message.as_ref() - )) - } - }; + let version_label = + server_version.as_ref().map(|v| format!("v{}", v.as_ref())); + + let separator_color = + cx.theme().colors().icon_disabled.opacity(0.8); - h_flex() + v_flex() .id("metadata-container") - .ml_neg_1() .gap_1() - .max_w(rems(164.)) - .child( - Icon::new(IconName::Circle) - .color(status_color) - .size(IconSize::Small), - ) + .when_some(server_message.as_ref(), |this, _| { + this.w(rems_from_px(240.)) + }) .child( - Label::new(status_label) - .size(LabelSize::Small) - .color(Color::Muted), - ) - .when_some(metadata_label.as_ref(), |submenu, metadata| { - submenu + h_flex() + .ml_neg_1() + .gap_1() .child( - Icon::new(IconName::Dash) - .color(Color::Disabled) - .size(IconSize::XSmall), + Icon::new(IconName::Circle) + .color(status_color) + .size(IconSize::Small), ) .child( - Label::new(metadata) + Label::new(status_label) .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), + .color(Color::Muted), ) + .when_some(version_label.as_ref(), |row, version| { + row.child( + Icon::new(IconName::Dash) + .color(Color::Custom(separator_color)) + .size(IconSize::XSmall), + ) + .child( + Label::new(version) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }) + .when_some(memory_label.as_ref(), |row, memory| { + row.child( + Icon::new(IconName::Dash) + .color(Color::Custom(separator_color)) + .size(IconSize::XSmall), + ) + .child( + Label::new(memory) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), + ) + 
.when_some(server_message.clone(), |container, message| { + container.child( + Label::new(message) + .color(Color::Muted) + .size(LabelSize::Small), + ) }) .when_some(binary_path.clone(), |el, path| { el.tooltip(Tooltip::text(path)) diff --git a/crates/languages/src/bash/brackets.scm b/crates/languages/src/bash/brackets.scm index 88a2a1b67f602afb4e7de21a0ec0a523d33e37ee..aba1fa2b35735d4380761ea6e1360305556072b3 100644 --- a/crates/languages/src/bash/brackets.scm +++ b/crates/languages/src/bash/brackets.scm @@ -1,12 +1,62 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("do" @open "done" @close) (#set! newline.only) (#set! rainbow.exclude)) -((case_statement ("in" @open "esac" @close)) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (elif_clause ("then" @open)) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (else_clause ("else" @open)) "fi" @close) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open) (elif_clause ("elif" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open "fi" @close)) (#set! newline.only) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("do" @open + "done" @close) + (#set! newline.only) + (#set! rainbow.exclude)) + +((case_statement + ("in" @open + "esac" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + (elif_clause + "then" @open) + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + (else_clause + "else" @open) + "fi" @close) + (#set! 
newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (elif_clause + "elif" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + ("then" @open + "fi" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/bash/highlights.scm b/crates/languages/src/bash/highlights.scm index 4a8d7eaf345b147270302b5ba8f20c975494766e..bc1c3b7ec1159f6d19cdf20ab36e0a02db076c66 100644 --- a/crates/languages/src/bash/highlights.scm +++ b/crates/languages/src/bash/highlights.scm @@ -43,13 +43,17 @@ (comment) @keyword.directive) (#match? @keyword.directive "^#![ \t]*/")) -(function_definition name: (word) @function) -(command_name (word) @function) +(function_definition + name: (word) @function) + +(command_name + (word) @function) (command argument: [ (word) @variable.parameter - (_ (word) @variable.parameter) + (_ + (word) @variable.parameter) ]) [ @@ -65,7 +69,6 @@ (expansion) ] @embedded - [ "$" "&&" @@ -89,9 +92,7 @@ (test_operator) @keyword.operator -[ - ";" -] @punctuation.delimiter +";" @punctuation.delimiter [ "(" @@ -104,6 +105,7 @@ (simple_expansion "$" @punctuation.special) + (expansion "${" @punctuation.special "}" @punctuation.special) @embedded @@ -112,10 +114,11 @@ "$(" @punctuation.special ")" @punctuation.special) -( - (command (_) @constant) - (#match? @constant "^-") -) +((command + (_) @constant) + (#match? 
@constant "^-")) + +(case_item + value: (_) @string.regex) -(case_item value: (_) @string.regex) (special_variable_name) @variable.special diff --git a/crates/languages/src/bash/indents.scm b/crates/languages/src/bash/indents.scm index 468fc595e56e2616547dc3e752318cd89df4a363..25a0dc20fd7fff62cd355d20917260e8e781e90e 100644 --- a/crates/languages/src/bash/indents.scm +++ b/crates/languages/src/bash/indents.scm @@ -1,12 +1,27 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.function + (if_statement) @start.if + (elif_clause) @start.elif + (else_clause) @start.else + (for_statement) @start.for + (while_statement) @start.while + (case_statement) @start.case + (case_item) @start.case_item diff --git a/crates/languages/src/bash/injections.scm b/crates/languages/src/bash/injections.scm index 9117c713b98fdd2896b13e4949a77c6489b9ee36..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/bash/injections.scm +++ b/crates/languages/src/bash/injections.scm @@ -1,3 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/bash/overrides.scm b/crates/languages/src/bash/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/bash/overrides.scm +++ b/crates/languages/src/bash/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/bash/redactions.scm b/crates/languages/src/bash/redactions.scm index 000cb042a573112a7d3c46f56862ba4119fdfdf3..5c2c83aa666e31ae23b7e54f966638f41f98244e 100644 --- a/crates/languages/src/bash/redactions.scm +++ b/crates/languages/src/bash/redactions.scm @@ -1,2 +1,2 @@ (variable_assignment - value: (_) @redact) + value: (_) @redact) diff --git a/crates/languages/src/bash/runnables.scm b/crates/languages/src/bash/runnables.scm index c88e549347b4d4897c43d22d24550f3904d8c5d1..3856495422dcd84b9c3619d34778e2183aae8498 100644 --- a/crates/languages/src/bash/runnables.scm +++ b/crates/languages/src/bash/runnables.scm @@ -1,5 +1,5 @@ ; Run bash scripts -( - (program . (_) @run) @_bash-script - (#set! tag bash-script) -) +((program + . + (_) @run) @_bash-script + (#set! 
tag bash-script)) diff --git a/crates/languages/src/bash/textobjects.scm b/crates/languages/src/bash/textobjects.scm index cca2f7d9e9e4a876984a602ee308ad7270b684dc..9a5e4853ee711abbc7407185a6da19b0c9cc3fef 100644 --- a/crates/languages/src/bash/textobjects.scm +++ b/crates/languages/src/bash/textobjects.scm @@ -2,6 +2,6 @@ body: (_ "{" (_)* @function.inside - "}" )) @function.around + "}")) @function.around (comment) @comment.around diff --git a/crates/languages/src/c/brackets.scm b/crates/languages/src/c/brackets.scm index 2149bddc6c9a7ec04667d03da75580b676e12a28..313d212a5eb28d006775781576d50e359be675a2 100644 --- a/crates/languages/src/c/brackets.scm +++ b/crates/languages/src/c/brackets.scm @@ -1,5 +1,16 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/c/highlights.scm b/crates/languages/src/c/highlights.scm index e426bd4f9048a96c09aef297f95c420c9ec21458..dc5a3bd99937eb3cd1a3af6efb7124aebc4008f1 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/languages/src/c/highlights.scm @@ -116,19 +116,23 @@ (identifier) @variable ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant "^_*[A-Z][A-Z\\d_]*$")) (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) (field_identifier) @property + (statement_identifier) @label [ @@ -139,6 +143,7 @@ ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -146,5 +151,6 @@ ; C23 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) diff --git a/crates/languages/src/c/imports.scm b/crates/languages/src/c/imports.scm index c3c2c9e68c4503d323d039f9c042d9501b5e4126..2aaab2106f5422db426876a7fa65c9674fe93174 100644 --- a/crates/languages/src/c/imports.scm +++ b/crates/languages/src/c/imports.scm @@ -1,7 +1,7 @@ (preproc_include - path: [ - ( - (system_lib_string) @source @wildcard - (#strip! @source "[<>]")) - (string_literal (string_content) @source @wildcard) - ]) @import + path: [ + ((system_lib_string) @source @wildcard + (#strip! @source "[<>]")) + (string_literal + (string_content) @source @wildcard) + ]) @import diff --git a/crates/languages/src/c/indents.scm b/crates/languages/src/c/indents.scm index b6d3c3c3bf7d1a05fd90667e42418bf9a389f8fb..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 100644 --- a/crates/languages/src/c/indents.scm +++ b/crates/languages/src/c/indents.scm @@ -9,15 +9,25 @@ (else_clause) ] @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent ((comment) @indent - (#match? @indent "^/\\*")) + (#match? 
@indent "^/\\*")) (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (do_statement) @start.do + (switch_statement) @start.switch + (else_clause) @start.else diff --git a/crates/languages/src/c/injections.scm b/crates/languages/src/c/injections.scm index 9ec3cf1f780123426f681ad758179b81697e59c5..010c697f08adec1d196833b4de492027a80960a4 100644 --- a/crates/languages/src/c/injections.scm +++ b/crates/languages/src/c/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,9 +7,9 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! injection.language "c")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! 
injection.language "c")) diff --git a/crates/languages/src/c/outline.scm b/crates/languages/src/c/outline.scm index efd911836cb718d698460adfe2d91d19cb976b30..abc9608343826545b9ebfd5f915d6352943911f6 100644 --- a/crates/languages/src/c/outline.scm +++ b/crates/languages/src/c/outline.scm @@ -1,91 +1,89 @@ (preproc_def - "#define" @context - name: (_) @name) @item + "#define" @context + name: (_) @name) @item (preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item (struct_specifier - "struct" @context - name: (_) @name) @item + "struct" @context + name: (_) @name) @item (union_specifier - "union" @context - name: (_) @name) @item + "union" @context + name: (_) @name) @item (enum_specifier - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (enumerator - name: (_) @name) @item + name: (_) @name) @item (field_declaration - type: (_) @context - declarator: (field_identifier) @name) @item + type: (_) @context + declarator: (field_identifier) @name) @item (type_definition - "typedef" @context - declarator: (_) @name) @item + "typedef" @context + declarator: (_) @name) @item (declaration - (type_qualifier)? @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item + (type_qualifier)? @context + type: (_)? 
@context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item (function_definition - (type_qualifier)? @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item + (type_qualifier)? @context + type: (_)? 
@context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item (comment) @annotation diff --git a/crates/languages/src/c/overrides.scm b/crates/languages/src/c/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/c/overrides.scm +++ b/crates/languages/src/c/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/languages/src/c/runnables.scm b/crates/languages/src/c/runnables.scm index 5a203f5d7a6eea3ab831a1b4281a74d3795ca74f..50c5ef5b71b4df5d0735a6a5019e9aee5a19f083 100644 --- a/crates/languages/src/c/runnables.scm +++ b/crates/languages/src/c/runnables.scm @@ -1,10 +1,6 @@ ; Tag the main function -( - (function_definition - declarator: (function_declarator - declarator: (identifier) @run - ) - ) @_c-main +((function_definition + declarator: (function_declarator + declarator: (identifier) @run)) @_c-main (#eq? @run "main") - (#set! tag c-main) -) + (#set! 
tag c-main)) diff --git a/crates/languages/src/c/textobjects.scm b/crates/languages/src/c/textobjects.scm index e29f508b701c8ee22eec27af47d899d446e67860..fd5ec0b49b7484a8ef2cbb7cb321f7020bdaeff8 100644 --- a/crates/languages/src/c/textobjects.scm +++ b/crates/languages/src/c/textobjects.scm @@ -1,31 +1,34 @@ (declaration - declarator: (function_declarator)) @function.around + declarator: (function_declarator)) @function.around (function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (preproc_function_def - value: (_) @function.inside) @function.around + value: (_) @function.inside) @function.around (comment) @comment.around (struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around diff --git a/crates/languages/src/cpp/brackets.scm b/crates/languages/src/cpp/brackets.scm index 9eaebba332861ef716902b3827d4940b71f37221..e0330c9b1f2ebdd45480c54e9053503a6b6f611b 100644 --- a/crates/languages/src/cpp/brackets.scm +++ b/crates/languages/src/cpp/brackets.scm @@ -1,6 +1,19 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/cpp/config.toml b/crates/languages/src/cpp/config.toml index 10c36a6ded1e1f3a1204d1e15af47fee78b8e049..e2608a8ce5f17cb648e4f86dc27da60ed8bdd2ae 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/languages/src/cpp/config.toml @@ -1,6 +1,6 @@ name = "C++" grammar = "cpp" -path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] +path_suffixes = ["cc", "hh", "cpp", "cppm", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] line_comments = ["// ", "/// ", "//! "] first_line_pattern = '^//.*-\*-\s*C\+\+\s*-\*-' decrease_indent_patterns = [ diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index dbb79e69b04e351ca231b45b21507e305b2cabf5..e074707d05dec638a1be9ea840c31f47537c438a 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -1,13 +1,15 @@ (identifier) @variable + (field_identifier) @property + (namespace_identifier) @namespace (concept_definition - name: (identifier) @concept) + name: (identifier) @concept) (requires_clause - constraint: (template_type - name: (type_identifier) @concept)) + constraint: (template_type + name: (type_identifier) @concept)) (module_name (identifier) @module) @@ -83,18 +85,23 @@ (operator_name "<=>" @operator.spaceship) -(destructor_name (identifier) @function) +(destructor_name + (identifier) @function) ((namespace_identifier) @type - (#match? @type "^[A-Z]")) + (#match? 
@type "^[A-Z]")) (auto) @type + (type_identifier) @type + type: (primitive_type) @type.builtin + (sized_type_specifier) @type.builtin ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -102,15 +109,18 @@ type: (primitive_type) @type.builtin ; C++11 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) ((identifier) @constant.builtin - (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$")) + (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$")) (statement_identifier) @label + (this) @variable.builtin -("static_assert") @function.builtin + +"static_assert" @function.builtin [ "alignas" @@ -197,7 +207,7 @@ type: (primitive_type) @type.builtin [ (null) - ("nullptr") + "nullptr" ] @constant.builtin (number_literal) @number @@ -285,5 +295,8 @@ type: (primitive_type) @type.builtin (binary_expression operator: "<=>" @operator.spaceship) -(conditional_expression ":" @operator) -(user_defined_literal (literal_suffix) @operator) +(conditional_expression + ":" @operator) + +(user_defined_literal + (literal_suffix) @operator) diff --git a/crates/languages/src/cpp/imports.scm b/crates/languages/src/cpp/imports.scm index a4ef817a80dbcd44336bdd8cd681587662aad435..43adde711b5352ef0d92566d4bdde91a847319b8 100644 --- a/crates/languages/src/cpp/imports.scm +++ b/crates/languages/src/cpp/imports.scm @@ -1,5 +1,6 @@ (preproc_include - path: [ - ((system_lib_string) @source @wildcard) - (string_literal (string_content) @source @wildcard) - ]) @import + path: [ + (system_lib_string) @source @wildcard + (string_literal + (string_content) @source @wildcard) + ]) @import diff --git a/crates/languages/src/cpp/indents.scm b/crates/languages/src/cpp/indents.scm index 985ebda6ffe679f479804d667db011587eacb2f9..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 100644 --- a/crates/languages/src/cpp/indents.scm +++ b/crates/languages/src/cpp/indents.scm @@ -1,23 +1,33 @@ [ - (field_expression) - 
(assignment_expression) - (init_declarator) - (if_statement) - (for_statement) - (while_statement) - (do_statement) - (else_clause) + (field_expression) + (assignment_expression) + (init_declarator) + (if_statement) + (for_statement) + (while_statement) + (do_statement) + (else_clause) ] @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent ((comment) @indent - (#match? @indent "^/\\*")) + (#match? @indent "^/\\*")) (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (do_statement) @start.do + (switch_statement) @start.switch + (else_clause) @start.else diff --git a/crates/languages/src/cpp/injections.scm b/crates/languages/src/cpp/injections.scm index 60c6ea7b63eb6dcb7e1bae02c66045266c0b6cd5..0f622d4edbada60d162e14260dfb1d05423cd503 100644 --- a/crates/languages/src/cpp/injections.scm +++ b/crates/languages/src/cpp/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,12 +7,12 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! 
injection.language "c++")) (raw_string_literal delimiter: (raw_string_delimiter) @injection.language diff --git a/crates/languages/src/cpp/outline.scm b/crates/languages/src/cpp/outline.scm index 75be97a916dca9f10b044ee9dc01eca09e6372ec..041ff7d1b02ec0be14aead872c5436b2c897e125 100644 --- a/crates/languages/src/cpp/outline.scm +++ b/crates/languages/src/cpp/outline.scm @@ -1,186 +1,195 @@ (preproc_def - "#define" @context - name: (_) @name) @item + "#define" @context + name: (_) @name) @item (preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item (namespace_definition - "inline"? @context - "namespace" @context - name: (_) @name) @item + "inline"? @context + "namespace" @context + name: (_) @name) @item (type_definition - "typedef" @context - declarator: (_) @name) @item + "typedef" @context + declarator: (_) @name) @item (struct_specifier - "struct" @context - name: (_) @name) @item + "struct" @context + name: (_) @name) @item (class_specifier - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (enum_specifier - "enum" @context - [ - "class" - "struct" - ]? @context - name: (_) @name) @item + "enum" @context + [ + "class" + "struct" + ]? @context + name: (_) @name) @item (union_specifier - "union" @context - name: (_) @name) @item + "union" @context + name: (_) @name) @item (enumerator - name: (_) @name) @item + name: (_) @name) @item (concept_definition - "concept" @context - name: (_) @name) @item + "concept" @context + name: (_) @name) @item (declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - ; The declaration may define multiple variables, using @item on the - ; declarator so that they get distinct ranges. 
- (init_declarator - declarator: (_) @item @name) - (identifier) @item @name - ] @item) + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + ; The declaration may define multiple variables, using @item on the + ; declarator so that they get distinct ranges. + (init_declarator + declarator: (_) @item @name) + (identifier) @item @name + ] @item) (function_definition - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? @context) @item + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? @context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? 
@context) @item (declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? @context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? @context) @item + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? @context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? 
@context) @item (field_declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) ; Fields declarations may define multiple fields, and so @item is on the ; declarator so they each get distinct ranges. - ] @item - (type_qualifier)? @context) + ] @item + (type_qualifier)? 
@context) (comment) @annotation diff --git a/crates/languages/src/cpp/overrides.scm b/crates/languages/src/cpp/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/cpp/overrides.scm +++ b/crates/languages/src/cpp/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/languages/src/cpp/textobjects.scm b/crates/languages/src/cpp/textobjects.scm index 027185a0cfab7b71f3dcd6a5d5507445e2778d34..61260cd814689aef68ca785132929963eb12d54f 100644 --- a/crates/languages/src/cpp/textobjects.scm +++ b/crates/languages/src/cpp/textobjects.scm @@ -1,37 +1,44 @@ (declaration - declarator: (function_declarator)) @function.around + declarator: (function_declarator)) @function.around (function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (preproc_function_def - value: (_) @function.inside) @function.around + value: (_) @function.inside) @function.around (comment) @comment.around (struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (class_specifier body: (_ - "{" - [(_) ":"? ";"?]* @class.inside - "}"?)) @class.around + "{" + [ + (_) + ":"? + ";"? 
+ ]* @class.inside + "}"?)) @class.around diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index c5c89a0c66431380cf9f500a23b74a19230f3046..6a8fb730a0faa6430d252cdd189d0620fcd07e4a 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -134,6 +134,7 @@ impl LspAdapter for CssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/css/brackets.scm b/crates/languages/src/css/brackets.scm index 2149bddc6c9a7ec04667d03da75580b676e12a28..313d212a5eb28d006775781576d50e359be675a2 100644 --- a/crates/languages/src/css/brackets.scm +++ b/crates/languages/src/css/brackets.scm @@ -1,5 +1,16 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/css/highlights.scm b/crates/languages/src/css/highlights.scm index 8fbb9f47d2bcdde1a3b20a184885efb5382557a8..b9d708b661b221544fb58a767981d868d33cb9f7 100644 --- a/crates/languages/src/css/highlights.scm +++ b/crates/languages/src/css/highlights.scm @@ -30,14 +30,24 @@ ] @keyword.operator (id_name) @selector.id + (class_name) @selector.class (namespace_name) @namespace -(namespace_selector (tag_name) @namespace "|") + +(namespace_selector + (tag_name) @namespace + "|") (attribute_name) @attribute -(pseudo_element_selector "::" (tag_name) @selector.pseudo) -(pseudo_class_selector ":" (class_name) @selector.pseudo) + +(pseudo_element_selector + "::" + (tag_name) @selector.pseudo) + +(pseudo_class_selector + ":" + (class_name) @selector.pseudo) [ (feature_name) @@ -58,13 +68,11 @@ (parenthesized_query (keyword_query) @property) -( - [ - (property_name) - (plain_value) - ] @variable - (#match? @variable "^--") -) +([ + (property_name) + (plain_value) +] @variable + (#match? @variable "^--")) [ "@media" @@ -80,6 +88,7 @@ ] @keyword (string_value) @string + (color_value) @string.special [ @@ -97,7 +106,8 @@ ";" ] @punctuation.delimiter -(id_selector "#" @punctuation.delimiter) +(id_selector + "#" @punctuation.delimiter) [ "{" diff --git a/crates/languages/src/css/indents.scm b/crates/languages/src/css/indents.scm index e9754690920500f55e611f981e46d0365560eb4f..a768bb040790087fa905c09a436e81c923db240a 100644 --- a/crates/languages/src/css/indents.scm +++ b/crates/languages/src/css/indents.scm @@ -1 +1,3 @@ -(_ "{" "}" @end) @indent +(_ + "{" + "}" @end) @indent diff --git a/crates/languages/src/css/injections.scm b/crates/languages/src/css/injections.scm index 9117c713b98fdd2896b13e4949a77c6489b9ee36..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/css/injections.scm +++ b/crates/languages/src/css/injections.scm @@ -1,3 +1,2 @@ ((comment) @injection.content - (#set! 
injection.language "comment") -) + (#set! injection.language "comment")) diff --git a/crates/languages/src/css/outline.scm b/crates/languages/src/css/outline.scm index 645616f9056d0cc9e85232e94f5d0666809884c6..6e6e9d3a03c7efd4e7d1814e74705ba3c34e20a1 100644 --- a/crates/languages/src/css/outline.scm +++ b/crates/languages/src/css/outline.scm @@ -1,18 +1,16 @@ (stylesheet - (import_statement - "@import" @context - ((string_value) @name)) @item) - + (import_statement + "@import" @context + (string_value) @name) @item) (rule_set - (selectors - . - (_) @name - ("," @name (_) @name)* - )) @item + (selectors + . + (_) @name + ("," @name + (_) @name)*)) @item (media_statement - "@media" @context - (_) @name - (block) -) @item + "@media" @context + (_) @name + (block)) @item diff --git a/crates/languages/src/css/overrides.scm b/crates/languages/src/css/overrides.scm index e5eade479723c33894b6165085603631bdfe8c64..7ca202fd7bc3db34dd71d5ae7893efe853101ced 100644 --- a/crates/languages/src/css/overrides.scm +++ b/crates/languages/src/css/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_value) @string diff --git a/crates/languages/src/css/textobjects.scm b/crates/languages/src/css/textobjects.scm index c9c6207b851e6b4c5908ce7d664616798db27f3b..88ae6bb8423feec432de6e168507233c1f293b09 100644 --- a/crates/languages/src/css/textobjects.scm +++ b/crates/languages/src/css/textobjects.scm @@ -1,30 +1,31 @@ (comment) @comment.around (rule_set - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around + (block + ("{" + (_)* @function.inside + "}"))) @function.around + (keyframe_block - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around + (block + ("{" + (_)* @function.inside + "}"))) @function.around (media_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around + (block + ("{" + (_)* @class.inside + "}"))) @class.around (supports_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around + (block + ("{" + (_)* 
@class.inside + "}"))) @class.around (keyframes_statement - (keyframe_block_list ( - "{" - (_)* @class.inside - "}" ))) @class.around + (keyframe_block_list + ("{" + (_)* @class.inside + "}"))) @class.around diff --git a/crates/languages/src/diff/highlights.scm b/crates/languages/src/diff/highlights.scm index 4a344389032b9ff12f7c00e42adffb00721737e1..a2e33190f154d6a210572dbb066000dca6f30455 100644 --- a/crates/languages/src/diff/highlights.scm +++ b/crates/languages/src/diff/highlights.scm @@ -4,14 +4,14 @@ (addition) (new_file) ] @string -;; TODO: This should eventually be `@diff.plus` with a fallback of `@string` +; TODO: This should eventually be `@diff.plus` with a fallback of `@string` [ (deletion) (old_file) ] @keyword -;; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` +; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` (commit) @constant (location) @attribute @@ -22,7 +22,7 @@ (mode) @number -([ +[ ".." "+" "++" @@ -32,7 +32,7 @@ "--" "---" "----" -] @punctuation.special) +] @punctuation.special [ (binary_change) diff --git a/crates/languages/src/diff/injections.scm b/crates/languages/src/diff/injections.scm index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/diff/injections.scm +++ b/crates/languages/src/diff/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/gitcommit/highlights.scm b/crates/languages/src/gitcommit/highlights.scm index 8670a6615aa49b8062a1ef2375884bf298b1df76..750a400f2b147c459d1a7932cd476286dc8189fc 100644 --- a/crates/languages/src/gitcommit/highlights.scm +++ b/crates/languages/src/gitcommit/highlights.scm @@ -1,18 +1,36 @@ (subject) @markup.heading + (path) @string.special.path + (branch) @string.special.symbol + (commit) @constant + (item) @markup.link.url + (header) @tag + (comment) @comment -(change kind: "new file" @diff.plus) -(change kind: "deleted" @diff.minus) -(change kind: "modified" @diff.delta) -(change kind: "renamed" @diff.delta.moved) +(change + kind: "new file" @diff.plus) + +(change + kind: "deleted" @diff.minus) + +(change + kind: "modified" @diff.delta) + +(change + kind: "renamed" @diff.delta.moved) (trailer key: (trailer_key) @variable.other.member value: (trailer_value) @string) -[":" "=" "->" (scissors)] @punctuation.delimiter +[ + ":" + "=" + "->" + (scissors) +] @punctuation.delimiter diff --git a/crates/languages/src/gitcommit/injections.scm b/crates/languages/src/gitcommit/injections.scm index 8fb9b459679489be7588d1ab9b6d53e40ea10c60..07c2dd95ca69642b15a7a778ab7e0caad47586cb 100644 --- a/crates/languages/src/gitcommit/injections.scm +++ b/crates/languages/src/gitcommit/injections.scm @@ -1,9 +1,8 @@ ((comment) @content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((scissors) @content - (#set! "language" "diff")) + (#set! "language" "diff")) ((rebase_command) @content - (#set! "language" "git_rebase")) + (#set! 
"language" "git_rebase")) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index abcb890566d9c0d0d6d9fe85b565c74825775250..5942a51f2a481b66cc8ba46072bd28c8285cbc07 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -8,8 +8,10 @@ pub use language::*; use language::{LanguageToolchainStore, LspAdapterDelegate, LspInstaller}; use lsp::{LanguageServerBinary, LanguageServerName}; +use project::lsp_store::language_server_settings; use regex::Regex; -use serde_json::json; +use serde_json::{Value, json}; +use settings::SemanticTokenRules; use smol::fs; use std::{ borrow::Cow, @@ -24,7 +26,17 @@ use std::{ }, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; -use util::{ResultExt, fs::remove_matching, maybe}; +use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into}; + +use crate::LanguageDir; + +pub(crate) fn semantic_token_rules() -> SemanticTokenRules { + let content = LanguageDir::get("go/semantic_token_rules.json") + .expect("missing go/semantic_token_rules.json"); + let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules"); + settings::parse_json_with_comments::(json) + .expect("failed to parse go semantic_token_rules.json") +} fn server_binary_arguments() -> Vec { vec!["-mode=stdio".into()] @@ -192,9 +204,10 @@ impl LspAdapter for GoLspAdapter { async fn initialization_options( self: Arc, - _: &Arc, + delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { - Ok(Some(json!({ + let mut default_config = json!({ "usePlaceholders": false, "hints": { "assignVariableTypes": true, @@ -205,7 +218,33 @@ impl LspAdapter for GoLspAdapter { "parameterNames": true, "rangeVariableTypes": true } - }))) + }); + + let project_initialization_options = cx.update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|s| s.initialization_options.clone()) + }); + + if let Some(override_options) = project_initialization_options { + 
merge_json_value_into(override_options, &mut default_config); + } + + Ok(Some(default_config)) + } + + async fn workspace_configuration( + self: Arc, + delegate: &Arc, + _: Option, + _: Option, + cx: &mut AsyncApp, + ) -> Result { + Ok(cx + .update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|settings| settings.settings.clone()) + }) + .unwrap_or_default()) } async fn label_for_completion( diff --git a/crates/languages/src/go/brackets.scm b/crates/languages/src/go/brackets.scm index 05fb1d7f9219889d652bbdbb294ca45e72cc9c05..6bee4099173ee83cc03e4f1d24d7000d102880fb 100644 --- a/crates/languages/src/go/brackets.scm +++ b/crates/languages/src/go/brackets.scm @@ -1,6 +1,19 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -((rune_literal) @open @close (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((rune_literal) @open @close + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/go/debugger.scm b/crates/languages/src/go/debugger.scm index f22b91f938e1159fa9bfec99f5000976766faf06..306b0448a7d817040562152b39d410100b207f1a 100644 --- a/crates/languages/src/go/debugger.scm +++ b/crates/languages/src/go/debugger.scm @@ -1,26 +1,44 @@ -(parameter_declaration (identifier) @debug-variable) +(parameter_declaration + (identifier) @debug-variable) -(short_var_declaration (expression_list (identifier) @debug-variable)) +(short_var_declaration + (expression_list + (identifier) @debug-variable)) -(var_declaration (var_spec (identifier) @debug-variable)) +(var_declaration + (var_spec + (identifier) @debug-variable)) -(const_declaration (const_spec (identifier) @debug-variable)) +(const_declaration + (const_spec + (identifier) @debug-variable)) -(assignment_statement (expression_list (identifier) @debug-variable)) +(assignment_statement + (expression_list + (identifier) @debug-variable)) -(binary_expression (identifier) @debug-variable +(binary_expression + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(call_expression (argument_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(call_expression + (argument_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) -(return_statement (expression_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(return_statement + (expression_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) -(range_clause (expression_list (identifier) @debug-variable)) +(range_clause + (expression_list + (identifier) @debug-variable)) -(parenthesized_expression (identifier) @debug-variable +(parenthesized_expression + (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) (block) @debug-scope + (function_declaration) @debug-scope diff --git a/crates/languages/src/go/highlights.scm b/crates/languages/src/go/highlights.scm index 15a512d6b7e359bf7290aee9d433f1ae7be352ec..670b4f05a961e35d3826c294d061ea7757fd1c0f 100644 --- a/crates/languages/src/go/highlights.scm +++ b/crates/languages/src/go/highlights.scm @@ -1,10 +1,12 @@ (identifier) @variable (type_identifier) @type + (type_spec name: (type_identifier) @type.definition) (field_identifier) @property + (package_identifier) @namespace (label_name) @label @@ -26,6 +28,7 @@ (method_declaration name: (field_identifier) @function.method) + (method_elem name: (field_identifier) @function.method) @@ -144,8 +147,7 @@ ; Go directives ((comment) @preproc - (#match? @preproc "^//go:")) + (#match? @preproc "^//go:")) ((comment) @preproc - (#match? @preproc "^// \\+build")) - + (#match? @preproc "^// \\+build")) diff --git a/crates/languages/src/go/imports.scm b/crates/languages/src/go/imports.scm index 7f0ff2d46e6a271d4258d23f46cc942830e2c6f9..23e480c10b20b76c6724df29a550e627c2aee799 100644 --- a/crates/languages/src/go/imports.scm +++ b/crates/languages/src/go/imports.scm @@ -1,14 +1,12 @@ (import_spec - name: [ - (dot) - (package_identifier) - ] - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import + name: [ + (dot) + (package_identifier) + ] + path: (interpreted_string_literal + (interpreted_string_literal_content) @namespace)) @wildcard @import (import_spec - !name - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import + !name + path: (interpreted_string_literal + (interpreted_string_literal_content) @namespace)) @wildcard @import diff --git a/crates/languages/src/go/indents.scm b/crates/languages/src/go/indents.scm index abbb72eb379d5fbb52267a633c60def07895a081..21e8cf7abbc1420ba94063a7ae6655ec0daa9baa 100644 --- a/crates/languages/src/go/indents.scm +++ 
b/crates/languages/src/go/indents.scm @@ -1,9 +1,17 @@ [ - (assignment_statement) - (call_expression) - (selector_expression) + (assignment_statement) + (call_expression) + (selector_expression) ] @indent -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/go/injections.scm b/crates/languages/src/go/injections.scm index 58583f4d22c7db8016397d8e47cd817b7c240764..73cf0bd352de0213f9a0d1efff300039f52a0697 100644 --- a/crates/languages/src/go/injections.scm +++ b/crates/languages/src/go/injections.scm @@ -1,7 +1,6 @@ ; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41 ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (call_expression (selector_expression) @_function @@ -14,722 +13,718 @@ (raw_string_literal) (interpreted_string_literal) ] @injection.content - (#set! injection.language "regex") - )) + (#set! 
injection.language "regex"))) ; INJECT SQL -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] (#match? @_comment "^\\/\\*\\s*sql\\s*\\*\\/$") - (#set! injection.language "sql") -) + (#set! 
injection.language "sql")) ; INJECT JSON -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") ; /* json */ or /*json*/ - (#set! injection.language "json") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") + ; /* json */ or /*json*/ + (#set! 
injection.language "json")) ; INJECT YAML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") ; /* yaml */ or /*yaml*/ - (#set! injection.language "yaml") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") + ; /* yaml */ or /*yaml*/ + (#set! 
injection.language "yaml")) ; INJECT XML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") ; /* xml */ or /*xml*/ - (#set! injection.language "xml") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") + ; /* xml */ or /*xml*/ + (#set! 
injection.language "xml")) ; INJECT HTML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") ; /* html */ or /*html*/ - (#set! injection.language "html") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") + ; /* html */ or /*html*/ + (#set! 
injection.language "html")) ; INJECT JS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") ; /* js */ or /*js*/ - (#set! injection.language "javascript") -) - + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") + ; /* js */ or /*js*/ + (#set! 
injection.language "javascript")) ; INJECT CSS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") ; /* css */ or /*css*/ - (#set! injection.language "css") -) - + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") + ; /* css */ or /*css*/ + (#set! 
injection.language "css")) ; INJECT LUA -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") ; /* lua */ or /*lua*/ - (#set! injection.language "lua") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") + ; /* lua */ or /*lua*/ + (#set! 
injection.language "lua")) ; INJECT BASH -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") ; /* bash */ or /*bash*/ - (#set! injection.language "bash") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") + ; /* bash */ or /*bash*/ + (#set! 
injection.language "bash")) ; INJECT CSV -( +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + ((comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (argument_list + (comment) @_comment [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: 
(expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - ((comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ) - ] - (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") ; /* csv */ or /*csv */ - (#set! injection.language "csv") -) + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]) +] + (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") + ; /* csv */ or /*csv */ + (#set! 
injection.language "csv")) diff --git a/crates/languages/src/go/outline.scm b/crates/languages/src/go/outline.scm index c745f55aff7dcd4b3bfd802884db7a985c1387fa..da42904fab942635b1140b486dde0c25694147d3 100644 --- a/crates/languages/src/go/outline.scm +++ b/crates/languages/src/go/outline.scm @@ -1,67 +1,61 @@ (comment) @annotation (type_declaration - "type" @context - [ - (type_spec - name: (_) @name) @item - ( - "(" - (type_spec - name: (_) @name) @item - ")" - ) - ] -) + "type" @context + [ + (type_spec + name: (_) @name) @item + ("(" + (type_spec + name: (_) @name) @item + ")") + ]) (function_declaration - "func" @context - name: (identifier) @name - parameters: (parameter_list - "(" - ")")) @item + "func" @context + name: (identifier) @name + parameters: (parameter_list + "(" + ")")) @item (method_declaration - "func" @context - receiver: (parameter_list - "(" @context - (parameter_declaration - name: (_) @context - type: (_) @context) - ")" @context) - name: (field_identifier) @name - parameters: (parameter_list - "(" - ")")) @item + "func" @context + receiver: (parameter_list + "(" @context + (parameter_declaration + name: (_) @context + type: (_) @context) + ")" @context) + name: (field_identifier) @name + parameters: (parameter_list + "(" + ")")) @item (const_declaration - "const" @context - (const_spec - name: (identifier) @name) @item) + "const" @context + (const_spec + name: (identifier) @name) @item) (source_file - (var_declaration - "var" @context - [ - ; The declaration may define multiple variables, and so @item is on - ; the identifier so they get distinct ranges. - (var_spec - name: (identifier) @name @item) - (var_spec_list - (var_spec - name: (identifier) @name @item) - ) - ] - ) -) + (var_declaration + "var" @context + [ + ; The declaration may define multiple variables, and so @item is on + ; the identifier so they get distinct ranges. 
+ (var_spec + name: (identifier) @name @item) + (var_spec_list + (var_spec + name: (identifier) @name @item)) + ])) (method_elem - name: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) @item + name: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) @item ; Fields declarations may define multiple fields, and so @item is on the ; declarator so they each get distinct ranges. (field_declaration - name: (_) @name @item) + name: (_) @name @item) diff --git a/crates/languages/src/go/overrides.scm b/crates/languages/src/go/overrides.scm index aae1520301bbb2a04b04f930b747d290051bc9cc..7989c4271f0ec9f18a6f75315f01d13454fca7b9 100644 --- a/crates/languages/src/go/overrides.scm +++ b/crates/languages/src/go/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (interpreted_string_literal) (raw_string_literal) diff --git a/crates/languages/src/go/runnables.scm b/crates/languages/src/go/runnables.scm index 786a923566d433f20b042178df2609e993e6da15..d00be6e1d0db4b8fd97596002099525128458a7f 100644 --- a/crates/languages/src/go/runnables.scm +++ b/crates/languages/src/go/runnables.scm @@ -1,170 +1,118 @@ ; Functions names start with `Test` -( - ( - (function_declaration name: (_) @run - (#match? @run "^Test.*") - (#not-match? @run "^TestMain$")) - ) @_ - (#set! tag go-test) -) +(((function_declaration + name: (_) @run + (#match? @run "^Test.*") + (#not-match? @run "^TestMain$"))) @_ + (#set! tag go-test)) ; Suite test methods (testify/suite) -( - (method_declaration - receiver: (parameter_list - (parameter_declaration - type: [ - (pointer_type (type_identifier) @_suite_name) - (type_identifier) @_suite_name - ] - ) - ) - name: (field_identifier) @run @_subtest_name - (#match? @_subtest_name "^Test.*") - (#match? @_suite_name ".*Suite") - ) @_ - (#set! 
tag go-testify-suite) -) +((method_declaration + receiver: (parameter_list + (parameter_declaration + type: [ + (pointer_type + (type_identifier) @_suite_name) + (type_identifier) @_suite_name + ])) + name: (field_identifier) @run @_subtest_name + (#match? @_subtest_name "^Test.*") + (#match? @_suite_name ".*Suite")) @_ + (#set! tag go-testify-suite)) ; `go:generate` comments -( - ((comment) @_comment @run - (#match? @_comment "^//go:generate")) - (#set! tag go-generate) -) +(((comment) @_comment @run + (#match? @_comment "^//go:generate")) + (#set! tag go-generate)) ; `t.Run` -( - ( - (call_expression - function: ( - selector_expression - field: _ @run @_name - (#eq? @_name "Run") - ) - arguments: ( - argument_list - . - [ - (interpreted_string_literal) - (raw_string_literal) - ] @_subtest_name - . - (func_literal - parameters: ( - parameter_list - (parameter_declaration - name: (identifier) @_param_name - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) @_second_argument - ) - ) - ) @_ - (#set! tag go-subtest) -) +(((call_expression + function: (selector_expression + field: _ @run @_name + (#eq? @_name "Run")) + arguments: (argument_list + . + [ + (interpreted_string_literal) + (raw_string_literal) + ] @_subtest_name + . + (func_literal + parameters: (parameter_list + (parameter_declaration + name: (identifier) @_param_name + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))) @_second_argument))) @_ + (#set! tag go-subtest)) ; Functions names start with `Example` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Example.*")) - ) @_ - (#set! tag go-example) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Example.*"))) @_ + (#set! 
tag go-example)) ; Functions names start with `Benchmark` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Benchmark.*")) - ) @_ - (#set! tag go-benchmark) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Benchmark.*"))) @_ + (#set! tag go-benchmark)) ; Functions names start with `Fuzz` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Fuzz")) - ) @_ - (#set! tag go-fuzz) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Fuzz"))) @_ + (#set! tag go-fuzz)) ; go run -( - ( - (function_declaration name: (_) @run - (#eq? @run "main")) - ) @_ - (#set! tag go-main) -) +(((function_declaration + name: (_) @run + (#eq? @run "main"))) @_ + (#set! tag go-main)) ; Table test cases - slice and map with explicit variable -( - (short_var_declaration - left: (expression_list (identifier) @_collection_var) - right: (expression_list - (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? @_key_type "string") - ) - ] - body: (literal_value - [ +((short_var_declaration + left: (expression_list + (identifier) @_collection_var) + right: (expression_list + (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? 
@_key_type "string")) + ] + body: (literal_value + [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - ) + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ])))) (for_statement (range_clause left: (expression_list [ - ( - (identifier) - (identifier) @_loop_var_inner - ) + ((identifier) + (identifier) @_loop_var_inner) (identifier) @_loop_var_outer - ] - ) + ]) right: (identifier) @_range_var - (#eq? @_range_var @_collection_var) - ) + (#eq? @_range_var @_collection_var)) body: (block (statement_list (expression_statement @@ -172,8 +120,7 @@ function: (selector_expression operand: (identifier) field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) + (#eq? @_run_method "Run")) arguments: (argument_list . [ @@ -181,8 +128,7 @@ operand: (identifier) @_tc_var (#eq? @_tc_var @_loop_var_inner) field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) + (#eq? @_field_check @_field_name)) (identifier) @_arg_var (#eq? @_arg_var @_loop_var_outer) ] @@ -195,113 +141,72 @@ package: (package_identifier) @_pkg name: (type_identifier) @_type (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! tag go-table-test-case) -) + (#eq? @_type "T")))))))))))) @_ + (#set! 
tag go-table-test-case)) ; Table test cases - slice and map declared right inside the loop without ; explicit variable -( - (for_statement - (range_clause - left: (expression_list +((for_statement + (range_clause + left: (expression_list + [ + ((identifier) + (identifier) @_loop_var_inner) + (identifier) @_loop_var_outer + ]) + right: (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? @_key_type "string")) + ] + body: (literal_value [ - ( - (identifier) - (identifier) @_loop_var_inner - ) - (identifier) @_loop_var_outer - ] - ) - right: (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? @_key_type "string") - ) - ] - body: (literal_value - [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - body: (block - (statement_list - (expression_statement - (call_expression - function: (selector_expression - operand: (identifier) - field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) - arguments: (argument_list - . [ - (selector_expression - operand: (identifier) @_tc_var - (#eq? @_tc_var @_loop_var_inner) - field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) - (identifier) @_arg_var - (#eq? @_arg_var @_loop_var_outer) - ] - . 
- (func_literal - parameters: (parameter_list - (parameter_declaration - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! tag go-table-test-case-without-explicit-variable) -) + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ]))) + body: (block + (statement_list + (expression_statement + (call_expression + function: (selector_expression + operand: (identifier) + field: (field_identifier) @_run_method + (#eq? @_run_method "Run")) + arguments: (argument_list + . + [ + (selector_expression + operand: (identifier) @_tc_var + (#eq? @_tc_var @_loop_var_inner) + field: (field_identifier) @_field_check + (#eq? @_field_check @_field_name)) + (identifier) @_arg_var + (#eq? @_arg_var @_loop_var_outer) + ] + . + (func_literal + parameters: (parameter_list + (parameter_declaration + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))))))))) @_ + (#set! 
tag go-table-test-case-without-explicit-variable)) diff --git a/crates/languages/src/go/semantic_token_rules.json b/crates/languages/src/go/semantic_token_rules.json new file mode 100644 index 0000000000000000000000000000000000000000..627a5c5f187b47918e6a56069c5ed1bda8583aa6 --- /dev/null +++ b/crates/languages/src/go/semantic_token_rules.json @@ -0,0 +1,7 @@ +[ + { + "token_type": "variable", + "token_modifiers": ["readonly"], + "style": ["constant"] + } +] diff --git a/crates/languages/src/go/textobjects.scm b/crates/languages/src/go/textobjects.scm index eb4f3a00501021167c3c2b9136d5cef2f131878f..4e0a78991a4b1ca49f48b0c1c73c51ff5e002f50 100644 --- a/crates/languages/src/go/textobjects.scm +++ b/crates/languages/src/go/textobjects.scm @@ -1,24 +1,27 @@ (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (type_declaration - (type_spec (struct_type (field_declaration_list ( - "{" - (_)* @class.inside - "}")?)))) @class.around + (type_spec + (struct_type + (field_declaration_list + ("{" + (_)* @class.inside + "}")?)))) @class.around (type_declaration - (type_spec (interface_type - (_)* @class.inside))) @class.around + (type_spec + (interface_type + (_)* @class.inside))) @class.around (type_declaration) @class.around diff --git a/crates/languages/src/gomod/highlights.scm b/crates/languages/src/gomod/highlights.scm index 03be1b5957160820033d93b35b39d4329b7890a6..f026035cb126382274e783ece2515148b6cffd73 100644 --- a/crates/languages/src/gomod/highlights.scm +++ b/crates/languages/src/gomod/highlights.scm @@ -15,6 +15,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/languages/src/gomod/injections.scm b/crates/languages/src/gomod/injections.scm index 
321c90add3710f35721daeb6b42abe38af094953..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/gomod/injections.scm +++ b/crates/languages/src/gomod/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! injection.language "comment")) diff --git a/crates/languages/src/gomod/structure.scm b/crates/languages/src/gomod/structure.scm index ce1bc9aa3ee0b1f77086103bad91825b5927005f..2da1b0d5e643d2235b9555c15cfe3624f14758f2 100644 --- a/crates/languages/src/gomod/structure.scm +++ b/crates/languages/src/gomod/structure.scm @@ -1,35 +1,29 @@ (require_directive "require" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (exclude_directive "exclude" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (module_directive "module" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (replace_directive "replace" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (retract_directive "retract" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (ignore_directive "ignore" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) diff --git a/crates/languages/src/gowork/highlights.scm b/crates/languages/src/gowork/highlights.scm index 9c84bcc4496394817190a86fa8cd4995b39475a2..b9d3d42e630c5c4f4eb877a330a15371ceb4d96a 100644 --- a/crates/languages/src/gowork/highlights.scm +++ b/crates/languages/src/gowork/highlights.scm @@ -9,6 +9,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/languages/src/gowork/injections.scm b/crates/languages/src/gowork/injections.scm index 
321c90add3710f35721daeb6b42abe38af094953..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/gowork/injections.scm +++ b/crates/languages/src/gowork/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! injection.language "comment")) diff --git a/crates/languages/src/javascript/brackets.scm b/crates/languages/src/javascript/brackets.scm index a16a6432692ec7b9e0e3d24151cb814fc11bd83d..69acbcd614e440d8e8e2010f1677e52cb651e15e 100644 --- a/crates/languages/src/javascript/brackets.scm +++ b/crates/languages/src/javascript/brackets.scm @@ -1,9 +1,29 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +("<" @open + "/>" @close) + +("" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/javascript/debugger.scm b/crates/languages/src/javascript/debugger.scm index a99f194a4a4130210b47f8170fca039acc163411..8f384fd8ad9e07fea89972464e64b905086bf580 100644 --- a/crates/languages/src/javascript/debugger.scm +++ b/crates/languages/src/javascript/debugger.scm @@ -1,23 +1,51 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index 5561dc31d56d52e6b4d6f71c07137537953410f6..4824684177ae2fe8c5b2f1d582b4d443216b9519 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -120,25 +119,26 @@ ; Special identifiers ; +(type_identifier) @type + +(predefined_type) @type.builtin + (class_declaration (type_identifier) @type.class) (extends_clause value: (identifier) @type.class) -(type_identifier) @type -(predefined_type) @type.builtin - ([ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -163,11 +163,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -224,7 +225,8 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -233,14 +235,13 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) [ "abstract" @@ -310,7 +311,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -320,7 +322,8 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) ; JSX elements (jsx_opening_element @@ -328,36 +331,61 @@ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_self_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + property: (property_identifier) @type) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? 
@tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/languages/src/javascript/imports.scm b/crates/languages/src/javascript/imports.scm index e26b97aeef9cb62395e7030f3173208d79187bd6..0e688d53fb6ed639c55c1fa84917711d19c3108a 100644 --- a/crates/languages/src/javascript/imports.scm +++ b/crates/languages/src/javascript/imports.scm @@ -1,14 +1,16 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import + !import_clause + source: (string + (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/javascript/indents.scm b/crates/languages/src/javascript/indents.scm index 9897f3060eaf37891cf4563cebc93345112422f8..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 100644 --- a/crates/languages/src/javascript/indents.scm +++ b/crates/languages/src/javascript/indents.scm @@ -1,20 +1,32 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent -(jsx_opening_element ">" @end) @indent +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent (jsx_element (jsx_opening_element) @start diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm index 244e025a6f5d62f1d3500fc35fc480b1baa2471e..8ccfc5028dea453013134c52db885d51ab2f673b 100644 --- a/crates/languages/src/javascript/injections.scm +++ b/crates/languages/src/javascript/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -10,119 +9,136 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? 
@_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") + function: (identifier) @_name + (#eq? @_name "html") arguments: (template_string) @injection.content - (#set! injection.language "html") -) + (#set! injection.language "html")) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! 
injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! 
injection.language "isograph"))) -) + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? 
@_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index 5f72103bc63bdfab73f7b858c01abe8d34317b22..7b8e4b2d46c9b88e6b719ceea5bb64eeb19af518 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -1,223 +1,269 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? @context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? 
@context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item))) ; Exported array destructuring (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ]))))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ]))))) ; Exported object destructuring (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)]))))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ]))))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + 
"const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) ; Object literal methods 
(variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/javascript/overrides.scm b/crates/languages/src/javascript/overrides.scm index 6dbbc88ef924c2cac65aaf9ff7e7dba87b99a359..4707e2a89fdd246de8d0152d6284e188caaf539e 100644 --- a/crates/languages/src/javascript/overrides.scm +++ b/crates/languages/src/javascript/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element diff --git a/crates/languages/src/javascript/runnables.scm b/crates/languages/src/javascript/runnables.scm index c64aacb50e286b1aeeb1231f2745d4d5923a7c1d..b410fb4d8cadd879f657f20a4685cf3bf834ad86 100644 --- a/crates/languages/src/javascript/runnables.scm +++ b/crates/languages/src/javascript/runnables.scm @@ -1,46 +1,42 @@ ; Add support for (node:test, bun:test and Jest) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! 
tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) diff --git a/crates/languages/src/javascript/textobjects.scm b/crates/languages/src/javascript/textobjects.scm index eace658e6b9847bcc651deedad2bc27cbfbf6975..f1cc9c9491e20320d193de5dec2a9c438cee5dcc 100644 --- a/crates/languages/src/javascript/textobjects.scm +++ b/crates/languages/src/javascript/textobjects.scm @@ -1,85 +1,91 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration (captures body for expression-bodied arrows) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around diff --git a/crates/languages/src/jsdoc/brackets.scm b/crates/languages/src/jsdoc/brackets.scm index 0e1bf5ca191bf7a319e2bcad6673d5cf0401380c..0f6ce4bf3d4c9c903d092fc669a416e83c44e82d 100644 --- a/crates/languages/src/jsdoc/brackets.scm +++ b/crates/languages/src/jsdoc/brackets.scm @@ -1,2 +1,5 @@ -("[" @open "]" @close) -("{" @open "}" @close) +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/jsdoc/highlights.scm b/crates/languages/src/jsdoc/highlights.scm index 581b5d8111fe25443de9951cfdddc8c277ad83ff..4b5657cb2d3fa6651e2e2b7eb495c095c0ae8482 100644 --- a/crates/languages/src/jsdoc/highlights.scm +++ b/crates/languages/src/jsdoc/highlights.scm @@ -1,3 +1,5 @@ (tag_name) @keyword.jsdoc + (type) @type.jsdoc + (identifier) @variable.jsdoc diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index b6c3954cf228d90714a5eb5676d86a204b47b88d..3d8ba972eb17b0fe7f9d5070b73a4fb9e94adef3 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -245,6 +245,7 @@ impl LspAdapter for JsonLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/json/brackets.scm b/crates/languages/src/json/brackets.scm index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e 100644 --- a/crates/languages/src/json/brackets.scm +++ b/crates/languages/src/json/brackets.scm @@ -1,3 +1,9 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/json/config.toml b/crates/languages/src/json/config.toml index fa74f0d8f3eddcbe5086d3e14ab3eba19751235c..fcae481bebbdff01957c55190266af545f346327 100644 --- a/crates/languages/src/json/config.toml +++ b/crates/languages/src/json/config.toml @@ -1,6 +1,6 @@ name = "JSON" grammar = "json" -path_suffixes = ["json", "flake.lock", "geojson"] +path_suffixes = ["json", "flake.lock", "geojson", "prettierrc"] line_comments = ["// "] autoclose_before = ",]}" brackets = [ diff --git a/crates/languages/src/json/highlights.scm b/crates/languages/src/json/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/languages/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/languages/src/json/indents.scm b/crates/languages/src/json/indents.scm index b7b2a2e7670f324730a64e15f1f59f37ac126270..63b015c2fe74dda013e201d88ebbfe06107def4a 100644 --- a/crates/languages/src/json/indents.scm +++ b/crates/languages/src/json/indents.scm @@ -1,2 +1,5 @@ -(array "]" @end) @indent -(object "}" @end) @indent +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/languages/src/json/outline.scm b/crates/languages/src/json/outline.scm index 43e2743478b27e4430bf3ddf82e49023d3bad584..c7f988077767819128b6f028fbcf196dcf5a5678 100644 --- a/crates/languages/src/json/outline.scm +++ b/crates/languages/src/json/outline.scm @@ -1,2 +1,3 @@ (pair - key: (string (string_content) @name)) @item + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/json/redactions.scm b/crates/languages/src/json/redactions.scm index 7359637244ac5892c0d57b41e2ef11652a3d0890..c220d0c18b79e007a6de511099254c59214ace74 100644 --- a/crates/languages/src/json/redactions.scm +++ b/crates/languages/src/json/redactions.scm @@ -1,4 +1,11 @@ 
-(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/languages/src/json/runnables.scm b/crates/languages/src/json/runnables.scm index 2396f05a5722f422f46fda8bb09e8b4e25bdb794..a0d95d89b577bf3f5a22b3ff6cedcd7945b4881b 100644 --- a/crates/languages/src/json/runnables.scm +++ b/crates/languages/src/json/runnables.scm @@ -1,21 +1,13 @@ ; Add support `package.json` and `composer.json` script runnable - -( - (document - (object - (pair - key: (string - (string_content) @_name - (#eq? @_name "scripts") - ) - value: (object - (pair - key: (string (string_content) @run @script) - ) - ) - ) - ) - ) - (#set! tag package-script) - (#set! tag composer-script) -) +((document + (object + (pair + key: (string + (string_content) @_name + (#eq? @_name "scripts")) + value: (object + (pair + key: (string + (string_content) @run @script)))))) + (#set! tag package-script) + (#set! tag composer-script)) diff --git a/crates/languages/src/jsonc/brackets.scm b/crates/languages/src/jsonc/brackets.scm index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e 100644 --- a/crates/languages/src/jsonc/brackets.scm +++ b/crates/languages/src/jsonc/brackets.scm @@ -1,3 +1,9 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/languages/src/jsonc/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/languages/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/languages/src/jsonc/indents.scm b/crates/languages/src/jsonc/indents.scm index b7b2a2e7670f324730a64e15f1f59f37ac126270..63b015c2fe74dda013e201d88ebbfe06107def4a 100644 --- a/crates/languages/src/jsonc/indents.scm +++ b/crates/languages/src/jsonc/indents.scm @@ -1,2 +1,5 @@ -(array "]" @end) @indent -(object "}" @end) @indent +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/languages/src/jsonc/injections.scm b/crates/languages/src/jsonc/injections.scm index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/jsonc/injections.scm +++ b/crates/languages/src/jsonc/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/jsonc/outline.scm b/crates/languages/src/jsonc/outline.scm index 43e2743478b27e4430bf3ddf82e49023d3bad584..c7f988077767819128b6f028fbcf196dcf5a5678 100644 --- a/crates/languages/src/jsonc/outline.scm +++ b/crates/languages/src/jsonc/outline.scm @@ -1,2 +1,3 @@ (pair - key: (string (string_content) @name)) @item + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/jsonc/overrides.scm b/crates/languages/src/jsonc/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/jsonc/overrides.scm +++ b/crates/languages/src/jsonc/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/jsonc/redactions.scm b/crates/languages/src/jsonc/redactions.scm index 7359637244ac5892c0d57b41e2ef11652a3d0890..c220d0c18b79e007a6de511099254c59214ace74 100644 --- a/crates/languages/src/jsonc/redactions.scm +++ b/crates/languages/src/jsonc/redactions.scm @@ -1,4 +1,11 @@ -(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index c5e64aed639374655b0e60bde5dbd0b3da5468c3..275b8c58ecde831c8f89ae688dc236583b135c07 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -141,6 +141,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime name: "go", adapters: vec![go_lsp_adapter.clone()], context: Some(go_context_provider.clone()), + semantic_token_rules: Some(go::semantic_token_rules()), ..Default::default() }, LanguageInfo { @@ -179,7 +180,13 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime }, LanguageInfo { name: "python", - adapters: vec![basedpyright_lsp_adapter, 
ruff_lsp_adapter], + adapters: vec![ + basedpyright_lsp_adapter, + ruff_lsp_adapter, + ty_lsp_adapter, + py_lsp_adapter, + python_lsp_adapter, + ], context: Some(python_context_provider), toolchain: Some(python_toolchain_provider), manifest_name: Some(SharedString::new_static("pyproject.toml").into()), @@ -281,9 +288,6 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime typescript_lsp_adapter, ); - languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter); - languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter); - languages.register_available_lsp_adapter(ty_lsp_adapter.name(), ty_lsp_adapter); // Register Tailwind for the existing languages that should have it by default. // // This can be driven by the `language_servers` setting once we have a way for @@ -368,8 +372,8 @@ fn register_language( ) { let config = load_config(name); if let Some(rules) = &semantic_token_rules { - SettingsStore::update_global(cx, |store, _| { - store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone()); + SettingsStore::update_global(cx, |store, cx| { + store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone(), cx); }); } for adapter in adapters { diff --git a/crates/languages/src/markdown-inline/highlights.scm b/crates/languages/src/markdown-inline/highlights.scm index 3c9f6fbcc340bd085466055c7b35551dd71b8c53..26c066ea0a0f6cc93073f6d525d44f2a6456fd49 100644 --- a/crates/languages/src/markdown-inline/highlights.scm +++ b/crates/languages/src/markdown-inline/highlights.scm @@ -1,6 +1,9 @@ (emphasis) @emphasis.markup + (strong_emphasis) @emphasis.strong.markup + (code_span) @text.literal.markup + (strikethrough) @strikethrough.markup [ @@ -13,8 +16,18 @@ (link_label) ] @link_text.markup -(inline_link ["(" ")"] @link_uri.markup) -(image ["(" ")"] @link_uri.markup) +(inline_link + [ + "(" + ")" + ] @link_uri.markup) + +(image + [ + "(" + ")" + ] @link_uri.markup) + [ (link_destination) 
(uri_autolink) diff --git a/crates/languages/src/markdown/brackets.scm b/crates/languages/src/markdown/brackets.scm index 172a2e7f723e3a170d80d19fa2f78fa334258105..5aaf93f63da3502c41b43027ee615592521c94ae 100644 --- a/crates/languages/src/markdown/brackets.scm +++ b/crates/languages/src/markdown/brackets.scm @@ -1,7 +1,24 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(((fenced_code_block_delimiter) @open (fenced_code_block_delimiter) @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(((fenced_code_block_delimiter) @open + (fenced_code_block_delimiter) @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 10b1e49757edc106c76e0dc7c591098ebdc6723f..5e7acd230b6f191aebff609bbc1087fbff8d3909 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -1,6 +1,6 @@ name = "Markdown" grammar = "markdown" -path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"] +path_suffixes = ["md", "mdx", "mdwn", "mdc", "markdown", "MD"] completion_query_characters = ["-"] block_comment = { start = "", tab_size = 0 } autoclose_before = ";:.,=}])>" diff --git a/crates/languages/src/markdown/highlights.scm b/crates/languages/src/markdown/highlights.scm index 707bcc0816366f5cc875c9f1197b42a2363cab99..1a471a848dfe0c9457ab23ba9dbf3fd9e8438f7d 100644 --- a/crates/languages/src/markdown/highlights.scm +++ b/crates/languages/src/markdown/highlights.scm @@ -9,7 +9,9 @@ (setext_heading) (thematic_break) ] @title.markup -(setext_heading (paragraph) 
@title.markup) + +(setext_heading + (paragraph) @title.markup) [ (list_marker_plus) @@ -20,10 +22,18 @@ ] @punctuation.list_marker.markup (block_quote_marker) @punctuation.markup -(pipe_table_header "|" @punctuation.markup) -(pipe_table_row "|" @punctuation.markup) -(pipe_table_delimiter_row "|" @punctuation.markup) -(pipe_table_delimiter_cell "-" @punctuation.markup) + +(pipe_table_header + "|" @punctuation.markup) + +(pipe_table_row + "|" @punctuation.markup) + +(pipe_table_delimiter_row + "|" @punctuation.markup) + +(pipe_table_delimiter_cell + "-" @punctuation.markup) [ (fenced_code_block_delimiter) @@ -31,4 +41,5 @@ ] @punctuation.embedded.markup (link_reference_definition) @link_text.markup + (link_destination) @link_uri.markup diff --git a/crates/languages/src/markdown/indents.scm b/crates/languages/src/markdown/indents.scm index dc6dfa6118309c264e146a5af167327947fc6946..742100e3238b6dc7d456307762b2089bb780ac33 100644 --- a/crates/languages/src/markdown/indents.scm +++ b/crates/languages/src/markdown/indents.scm @@ -1,3 +1,4 @@ -(list (list_item) @indent) +(list + (list_item) @indent) (list_item) @start.list_item diff --git a/crates/languages/src/markdown/injections.scm b/crates/languages/src/markdown/injections.scm index f2b959dfdae9d5b0c11146c2f2e5509005a2fe5e..46717b28a97a2019f3bcd6b01815debccb3c3e30 100644 --- a/crates/languages/src/markdown/injections.scm +++ b/crates/languages/src/markdown/injections.scm @@ -4,11 +4,13 @@ (code_fence_content) @injection.content) ((inline) @injection.content - (#set! injection.language "markdown-inline")) + (#set! injection.language "markdown-inline")) ((html_block) @injection.content (#set! injection.language "html")) -((minus_metadata) @injection.content (#set! injection.language "yaml")) +((minus_metadata) @injection.content + (#set! injection.language "yaml")) -((plus_metadata) @injection.content (#set! injection.language "toml")) +((plus_metadata) @injection.content + (#set! 
injection.language "toml")) diff --git a/crates/languages/src/markdown/outline.scm b/crates/languages/src/markdown/outline.scm index dcca3db4d4cb920a7d9f939dc99197d139c3c2e6..a4d8c586dd991f4ada1b7cffa1b2319eb79a7973 100644 --- a/crates/languages/src/markdown/outline.scm +++ b/crates/languages/src/markdown/outline.scm @@ -1,3 +1,6 @@ (section - (atx_heading - . (_) @context . (_) @name)) @item + (atx_heading + . + (_) @context + . + (_) @name)) @item diff --git a/crates/languages/src/markdown/textobjects.scm b/crates/languages/src/markdown/textobjects.scm index e0f76c5365155687d6d53d38f222513b480a3aa7..c84914b2409dd53c27e22c33d8ca9771b699f48d 100644 --- a/crates/languages/src/markdown/textobjects.scm +++ b/crates/languages/src/markdown/textobjects.scm @@ -1,3 +1,3 @@ (section - (atx_heading) - (_)* @class.inside) @class.around + (atx_heading) + (_)* @class.inside) @class.around diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index b1192464e9601183ac5d91196bfbe529feaa693f..722f4bb795ea857a9d399ef5b291beb8503f1c92 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -112,6 +112,8 @@ impl FromStr for TestRunner { /// Decided to ignore Pyright's sortText() completely and to manually sort all entries fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { for item in items { + let is_named_argument = item.label.ends_with('='); + let is_dunder = item.label.starts_with("__") && item.label.ends_with("__"); let visibility_priority = if is_dunder { @@ -124,23 +126,35 @@ fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { '0' // public }; + let is_external = item + .detail + .as_ref() + .is_some_and(|detail| detail == "Auto-import"); + + let source_priority = if is_external { '1' } else { '0' }; + // Kind priority within same visibility level let kind_priority = match item.kind { - Some(lsp::CompletionItemKind::ENUM_MEMBER) => '0', - Some(lsp::CompletionItemKind::FIELD) => '1', - 
Some(lsp::CompletionItemKind::PROPERTY) => '2', - Some(lsp::CompletionItemKind::VARIABLE) => '3', - Some(lsp::CompletionItemKind::CONSTANT) => '4', - Some(lsp::CompletionItemKind::METHOD) => '5', - Some(lsp::CompletionItemKind::FUNCTION) => '5', - Some(lsp::CompletionItemKind::CLASS) => '6', - Some(lsp::CompletionItemKind::MODULE) => '7', - _ => '8', + Some(lsp::CompletionItemKind::KEYWORD) => '0', + Some(lsp::CompletionItemKind::ENUM_MEMBER) => '1', + Some(lsp::CompletionItemKind::FIELD) => '2', + Some(lsp::CompletionItemKind::PROPERTY) => '3', + Some(lsp::CompletionItemKind::VARIABLE) => '4', + Some(lsp::CompletionItemKind::CONSTANT) => '5', + Some(lsp::CompletionItemKind::METHOD) => '6', + Some(lsp::CompletionItemKind::FUNCTION) => '6', + Some(lsp::CompletionItemKind::CLASS) => '7', + Some(lsp::CompletionItemKind::MODULE) => '8', + + _ => 'z', }; + // Named arguments get higher priority + let argument_priority = if is_named_argument { '0' } else { '1' }; + item.sort_text = Some(format!( - "{}{}{}", - visibility_priority, kind_priority, item.label + "{}{}{}{}{}", + argument_priority, source_priority, visibility_priority, kind_priority, item.label )); } } @@ -493,6 +507,7 @@ impl LspAdapter for PyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration @@ -1689,7 +1704,14 @@ impl LspAdapter for PyLspAdapter { Self::SERVER_NAME } - async fn process_completions(&self, _items: &mut [lsp::CompletionItem]) {} + async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { + for item in items { + let is_named_argument = item.label.ends_with('='); + let priority = if is_named_argument { '0' } else { '1' }; + let sort_text = item.sort_text.take().unwrap_or_else(|| item.label.clone()); + item.sort_text = Some(format!("{}{}", priority, sort_text)); + } + } async fn label_for_completion( 
&self, @@ -1951,6 +1973,7 @@ impl LspAdapter for BasedPyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration diff --git a/crates/languages/src/python/brackets.scm b/crates/languages/src/python/brackets.scm index 9e5b59788fc88fcb0830325417de50a9414828b8..5abcf6bdd43624f625e3c08444701fa67311c00f 100644 --- a/crates/languages/src/python/brackets.scm +++ b/crates/languages/src/python/brackets.scm @@ -1,4 +1,12 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(((string_start) @open (string_end) @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(((string_start) @open + (string_end) @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/python/debugger.scm b/crates/languages/src/python/debugger.scm index 807d6e865d2f60637f60b397ccc1a61fe3360fa1..8c241f8cae0e4c1e2ea39311dd86fda2ba6978dc 100644 --- a/crates/languages/src/python/debugger.scm +++ b/crates/languages/src/python/debugger.scm @@ -1,43 +1,97 @@ (identifier) @debug-variable (#eq? 
@debug-variable "self") -(assignment left: (identifier) @debug-variable) -(assignment left: (pattern_list (identifier) @debug-variable)) -(assignment left: (tuple_pattern (identifier) @debug-variable)) +(assignment + left: (identifier) @debug-variable) -(augmented_assignment left: (identifier) @debug-variable) +(assignment + left: (pattern_list + (identifier) @debug-variable)) -(for_statement left: (identifier) @debug-variable) -(for_statement left: (pattern_list (identifier) @debug-variable)) -(for_statement left: (tuple_pattern (identifier) @debug-variable)) +(assignment + left: (tuple_pattern + (identifier) @debug-variable)) -(for_in_clause left: (identifier) @debug-variable) -(for_in_clause left: (pattern_list (identifier) @debug-variable)) -(for_in_clause left: (tuple_pattern (identifier) @debug-variable)) +(augmented_assignment + left: (identifier) @debug-variable) -(as_pattern (identifier) @debug-variable) +(for_statement + left: (identifier) @debug-variable) -(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + left: (pattern_list + (identifier) @debug-variable)) -(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(set (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + left: (tuple_pattern + (identifier) @debug-variable)) -(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_in_clause + left: (identifier) @debug-variable) -(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_in_clause + left: (pattern_list + (identifier) @debug-variable)) -(return_statement (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(for_in_clause + left: (tuple_pattern + (identifier) @debug-variable)) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(as_pattern + (identifier) @debug-variable) -(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_operator + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_operator + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(while_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(comparison_operator + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(tuple + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(set + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(subscript + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(attribute + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(argument_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(if_statement + condition: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(while_statement + condition: (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) (block) @debug-scope + (module) @debug-scope diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index f15b3a0e2b03d9c913627b319aff9bca6bb8708e..87283aaa799a15ea188f3427b4277e9eaba517c1 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,6 +1,8 @@ ; Identifier naming conventions; these "soft conventions" should stay at the top of the file as they're often overridden (identifier) @variable -(attribute attribute: (identifier) @property) + +(attribute + attribute: (identifier) @property) ; CamelCase for classes ((identifier) @type.class @@ -10,45 +12,56 @@ ((identifier) @constant (#match? @constant "^_*[A-Z][A-Z0-9_]*$")) -(type (identifier) @type) -(generic_type (identifier) @type) +(type + (identifier) @type) + +(generic_type + (identifier) @type) + (comment) @comment + (string) @string + (escape_sequence) @string.escape ; Type alias -(type_alias_statement "type" @keyword) +(type_alias_statement + "type" @keyword) ; TypeVar with constraints in type parameters (type - (tuple (identifier) @type) -) + (tuple + (identifier) @type)) ; Forward references (type - (string) @type -) - + (string) @type) ; Function calls - (call - function: (attribute attribute: (identifier) @function.method.call)) + function: (attribute + attribute: (identifier) @function.method.call)) + (call function: (identifier) @function.call) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) + (decorator "@" @punctuation.special [ (identifier) @function.decorator - (attribute attribute: (identifier) @function.decorator) - (call function: (identifier) @function.decorator.call) - (call (attribute attribute: (identifier) @function.decorator.call)) + (attribute + attribute: (identifier) @function.decorator) + (call + function: (identifier) @function.decorator.call) + (call + (attribute + attribute: (identifier) @function.decorator.call)) ]) ; 
Function and class definitions - (function_definition name: (identifier) @function.definition) @@ -69,15 +82,15 @@ ; Function arguments (function_definition parameters: (parameters - [ - (identifier) @variable.parameter; Simple parameters + [ + (identifier) @variable.parameter ; Simple parameters (typed_parameter (identifier) @variable.parameter) ; Typed parameters (default_parameter name: (identifier) @variable.parameter) ; Default parameters (typed_default_parameter name: (identifier) @variable.parameter) ; Typed default parameters - ])) + ])) ; Keyword arguments (call @@ -86,28 +99,30 @@ name: (identifier) @function.kwargs))) ; Class definitions and calling: needs to come after the regex matching above - (class_definition name: (identifier) @type.class.definition) (class_definition superclasses: (argument_list - (identifier) @type.class.inheritance)) + (identifier) @type.class.inheritance)) (call function: (identifier) @type.class.call (#match? @type.class.call "^_*[A-Z][A-Za-z0-9_]*$")) ; Builtins - ((call function: (identifier) @function.builtin) - (#any-of? - @function.builtin - "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" "vars" "zip" "__import__")) + (#any-of? 
@function.builtin + "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" + "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" + "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" + "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" + "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" + "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" + "vars" "zip" "__import__")) ; Literals - [ (true) (false) @@ -124,10 +139,11 @@ ] @number ; Self references - [ - (parameters (identifier) @variable.special) - (attribute (identifier) @variable.special) + (parameters + (identifier) @variable.special) + (attribute + (identifier) @variable.special) (#any-of? @variable.special "self" "cls") ] @@ -152,37 +168,57 @@ ; Docstrings. ([ - (expression_statement (assignment)) + (expression_statement + (assignment)) (type_alias_statement) ] -. (expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (module - .(expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (class_definition - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (function_definition "async"? "def" name: (_) (parameters)? - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block - . (comment) @comment* - . (expression_statement (string) @string.doc)+)) + . + (comment) @comment* + . + (expression_statement + (string) @string.doc)+)) (module - . (comment) @comment* - . (expression_statement (string) @string.doc)+) + . + (comment) @comment* + . 
+ (expression_statement + (string) @string.doc)+) (class_definition body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)) + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block @@ -190,9 +226,11 @@ name: (identifier) @function.method.constructor (#eq? @function.method.constructor "__init__") body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)))) - + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)))) [ "-" @@ -286,18 +324,23 @@ "lambda" ] @keyword.definition -(decorator (identifier) @attribute.builtin +(decorator + (identifier) @attribute.builtin (#any-of? @attribute.builtin "classmethod" "staticmethod" "property")) ; Builtin types as identifiers [ (call function: (identifier) @type.builtin) - (type (identifier) @type.builtin) - (generic_type (identifier) @type.builtin) + (type + (identifier) @type.builtin) + (generic_type + (identifier) @type.builtin) ; also check if type binary operator left identifier for union types (type (binary_operator left: (identifier) @type.builtin)) - (#any-of? @type.builtin "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" "object" "range" "set" "slice" "str" "tuple") + (#any-of? @type.builtin + "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" + "object" "range" "set" "slice" "str" "tuple") ] diff --git a/crates/languages/src/python/imports.scm b/crates/languages/src/python/imports.scm index 7a1e2b225b9e310098f316c29fe6b1a27634bf12..26538fee1b41df13f258c8b315cc5e266458efa1 100644 --- a/crates/languages/src/python/imports.scm +++ b/crates/languages/src/python/imports.scm @@ -1,32 +1,38 @@ (import_statement - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) 
- (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)) - ]) @wildcard @import + name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .) + (aliased_import + name: (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .)) + ]) @wildcard @import (import_from_statement - module_name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) - (relative_import - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)?) - ] - (wildcard_import)? @wildcard - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - alias: (identifier) @alias) - ]?) @import + module_name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .) + (relative_import + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .)?) + ] + (wildcard_import)? @wildcard + name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @name .) + (aliased_import + name: (dotted_name + ((identifier) @namespace + ".")* + (identifier) @name .) + alias: (identifier) @alias) + ]?) 
@import diff --git a/crates/languages/src/python/indents.scm b/crates/languages/src/python/indents.scm index 3d4c1cc9c4260d4e925cc373662ae5ca3b82e124..9361aa7158725b22e40040e7d730d2693c688c97 100644 --- a/crates/languages/src/python/indents.scm +++ b/crates/languages/src/python/indents.scm @@ -1,17 +1,37 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.def + (class_definition) @start.class + (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (with_statement) @start.with + (match_statement) @start.match + (try_statement) @start.try + (elif_clause) @start.elif + (else_clause) @start.else + (except_clause) @start.except + (finally_clause) @start.finally + (case_clause) @start.case diff --git a/crates/languages/src/python/injections.scm b/crates/languages/src/python/injections.scm index d8470140e999f3dc649c0a498987cfae7df6bf59..bc47469dc870c4dec13f4c30fafc8a2fb29749fd 100644 --- a/crates/languages/src/python/injections.scm +++ b/crates/languages/src/python/injections.scm @@ -1,34 +1,26 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ; SQL ----------------------------------------------------------------------------- -( +([ + ; function calls + (call [ - ; function calls - (call - [ - (attribute attribute: (identifier) @function_name) - (identifier) @function_name - ] - arguments: (argument_list - (comment) @comment - (string - (string_content) @injection.content - ) - )) - - ; string variables - ((comment) @comment - . - (expression_statement - (assignment - right: (string - (string_content) @injection.content - ) - ) - )) + (attribute + attribute: (identifier)) + (identifier) ] - (#match? @comment "^(#|#\\s+)(?i:sql)\\s*$") - (#set! 
injection.language "sql") -) + arguments: (argument_list + (comment) @_comment + (string + (string_content) @injection.content))) + ; string variables + ((comment) @_comment + . + (expression_statement + (assignment + right: (string + (string_content) @injection.content)))) +] + (#match? @_comment "^(#|#\\s+)(?i:sql)\\s*$") + (#set! injection.language "sql")) diff --git a/crates/languages/src/python/outline.scm b/crates/languages/src/python/outline.scm index 70beb4e67585918ca1f893140ec79ee2428d47d7..c335eef46545fcc0b493e66c780b6ecf839bd791 100644 --- a/crates/languages/src/python/outline.scm +++ b/crates/languages/src/python/outline.scm @@ -1,11 +1,10 @@ (decorator) @annotation (class_definition - "class" @context - name: (identifier) @name - ) @item + "class" @context + name: (identifier) @name) @item (function_definition - "async"? @context - "def" @context - name: (_) @name) @item + "async"? @context + "def" @context + name: (_) @name) @item diff --git a/crates/languages/src/python/overrides.scm b/crates/languages/src/python/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/python/overrides.scm +++ b/crates/languages/src/python/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/python/runnables.scm b/crates/languages/src/python/runnables.scm index 1c7e76d73be2357c71ac2f3adf41b14db969e262..3480d4a81017605da2f7cd473595d339f5d522a8 100644 --- a/crates/languages/src/python/runnables.scm +++ b/crates/languages/src/python/runnables.scm @@ -1,151 +1,108 @@ ; subclasses of unittest.TestCase or TestCase -( - (class_definition - name: (identifier) @run @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - ) @_python-unittest-class - (#set! 
tag python-unittest-class) - ) +((class_definition + name: (identifier) @run @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? @_superclass "TestCase")) @_python-unittest-class + (#set! tag python-unittest-class)) ; test methods whose names start with `test` in a TestCase -( - (class_definition - name: (identifier) @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - body: (block - (function_definition - name: (identifier) @run @_unittest_method_name - (#match? @_unittest_method_name "^test.*") - ) @_python-unittest-method - (#set! tag python-unittest-method) - ) - ) - ) +(class_definition + name: (identifier) @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? @_superclass "TestCase") + body: (block + (function_definition + name: (identifier) @run @_unittest_method_name + (#match? @_unittest_method_name "^test.*")) @_python-unittest-method + (#set! tag python-unittest-method))) ; pytest functions -( - (module - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) @_python-pytest-method - ) - (#set! tag python-pytest-method) - ) +((module + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_")) @_python-pytest-method) + (#set! tag python-pytest-method)) ; decorated pytest functions -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) @_python-pytest-method - ) - (#set! 
tag python-pytest-method) - ) - +((module + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) @_python-pytest-method) + (#set! tag python-pytest-method)) ; pytest classes -( - (module - (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - (#set! tag python-pytest-class) - ) - ) - +(module + (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test")) + (#set! tag python-pytest-class)) ; decorated pytest classes -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - ) - (#set! tag python-pytest-class) - ) - ) - +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test"))) + (#set! tag python-pytest-class)) ; pytest class methods -( - (module - (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) +(module + (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? 
@_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method))) ; decorated pytest class methods -( - (module - (decorated_definition +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) - ) + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method)))) ; module main method -( - (module - (if_statement - condition: (comparison_operator - (identifier) @run @_lhs - operators: "==" - (string) @_rhs - ) - (#eq? @_lhs "__name__") - (#match? @_rhs "^[\"']__main__[\"']$") - (#set! tag python-module-main-method) - ) - ) - ) +(module + (if_statement + condition: (comparison_operator + (identifier) @run @_lhs + operators: "==" + (string) @_rhs) + (#eq? @_lhs "__name__") + (#match? @_rhs "^[\"']__main__[\"']$") + (#set! 
tag python-module-main-method))) diff --git a/crates/languages/src/regex/brackets.scm b/crates/languages/src/regex/brackets.scm index 191fd9c084a52eced37428281971ff9e569a4932..3779d8514bdee9fed0abe1f14b98851754decd8c 100644 --- a/crates/languages/src/regex/brackets.scm +++ b/crates/languages/src/regex/brackets.scm @@ -1,3 +1,8 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/regex/highlights.scm b/crates/languages/src/regex/highlights.scm index b5adecf472941154ae84d2acb62fea218859bbea..b0df4b98be08214554dd58a1dcfd1aab0b06586b 100644 --- a/crates/languages/src/regex/highlights.scm +++ b/crates/languages/src/regex/highlights.scm @@ -51,5 +51,6 @@ (character_class [ "^" @operator.regex - (class_range "-" @operator.regex) + (class_range + "-" @operator.regex) ]) diff --git a/crates/languages/src/rust/brackets.scm b/crates/languages/src/rust/brackets.scm index 7a35adb10021c83b8e08e888187ab133c5313ad9..9d78bb11116a0cbff542c721596ec6f8fc92d0cb 100644 --- a/crates/languages/src/rust/brackets.scm +++ b/crates/languages/src/rust/brackets.scm @@ -1,7 +1,23 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(closure_parameters "|" @open "|" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(closure_parameters + "|" @open + "|" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/rust/debugger.scm b/crates/languages/src/rust/debugger.scm index 5347413f698083287b9bedd25f4732d24fbbf76e..3c7195796feb41a771ec8071d78bea60efb61fd9 100644 --- a/crates/languages/src/rust/debugger.scm +++ b/crates/languages/src/rust/debugger.scm @@ -1,50 +1,85 @@ (metavariable) @debug-variable -(parameter (identifier) @debug-variable) +(parameter + (identifier) @debug-variable) (self) @debug-variable -(static_item (identifier) @debug-variable) -(const_item (identifier) @debug-variable) +(static_item + (identifier) @debug-variable) -(let_declaration pattern: (identifier) @debug-variable) +(const_item + (identifier) @debug-variable) -(let_condition (identifier) @debug-variable) +(let_declaration + pattern: (identifier) @debug-variable) -(match_arm (identifier) @debug-variable) +(let_condition + (identifier) @debug-variable) -(for_expression (identifier) @debug-variable) +(match_arm + (identifier) @debug-variable) -(closure_parameters (identifier) @debug-variable) +(for_expression + (identifier) @debug-variable) -(assignment_expression (identifier) @debug-variable) +(closure_parameters + (identifier) @debug-variable) -(field_expression (identifier) @debug-variable) +(assignment_expression + (identifier) @debug-variable) -(binary_expression (identifier) @debug-variable +(field_expression + (identifier) @debug-variable) + +(binary_expression + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(reference_expression (identifier) @debug-variable +(reference_expression + (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) -(array_expression (identifier) @debug-variable) -(tuple_expression (identifier) @debug-variable) -(return_expression (identifier) @debug-variable) -(await_expression (identifier) @debug-variable) -(try_expression (identifier) @debug-variable) -(index_expression (identifier) @debug-variable) -(range_expression (identifier) @debug-variable) -(unary_expression (identifier) @debug-variable) +(array_expression + (identifier) @debug-variable) + +(tuple_expression + (identifier) @debug-variable) + +(return_expression + (identifier) @debug-variable) + +(await_expression + (identifier) @debug-variable) + +(try_expression + (identifier) @debug-variable) + +(index_expression + (identifier) @debug-variable) + +(range_expression + (identifier) @debug-variable) + +(unary_expression + (identifier) @debug-variable) + +(if_expression + (identifier) @debug-variable) -(if_expression (identifier) @debug-variable) -(while_expression (identifier) @debug-variable) +(while_expression + (identifier) @debug-variable) -(parenthesized_expression (identifier) @debug-variable) +(parenthesized_expression + (identifier) @debug-variable) -(arguments (identifier) @debug-variable +(arguments + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(macro_invocation (token_tree (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(macro_invocation + (token_tree + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]"))) (block) @debug-scope diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index 82008d701536177cbe7cab8d6fc6c82e0568e944..57e5ed3f704dcd70974b73e0a0d4e31253191048 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -1,17 +1,33 @@ (identifier) @variable + (metavariable) @variable + (type_identifier) @type + (fragment_specifier) @type + (primitive_type) @type.builtin + (self) @variable.special + (field_identifier) @property + (shorthand_field_identifier) @property -(trait_item name: (type_identifier) @type.interface) -(impl_item trait: (type_identifier) @type.interface) -(abstract_type trait: (type_identifier) @type.interface) -(dynamic_type trait: (type_identifier) @type.interface) -(trait_bounds (type_identifier) @type.interface) +(trait_item + name: (type_identifier) @type.interface) + +(impl_item + trait: (type_identifier) @type.interface) + +(abstract_type + trait: (type_identifier) @type.interface) + +(dynamic_type + trait: (type_identifier) @type.interface) + +(trait_bounds + (type_identifier) @type.interface) (call_expression function: [ @@ -31,8 +47,11 @@ field: (field_identifier) @function.method) ]) -(function_item name: (identifier) @function.definition) -(function_signature_item name: (identifier) @function.definition) +(function_item + name: (identifier) @function.definition) + +(function_signature_item + name: (identifier) @function.definition) (macro_invocation macro: [ @@ -48,17 +67,17 @@ name: (identifier) @function.special.definition) ; Identifier conventions - ; Assume uppercase names are types/enum-constructors ((identifier) @type - (#match? @type "^[A-Z]")) + (#match? @type "^[A-Z]")) ; Assume all-caps names are constants ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant "^_*[A-Z][A-Z\\d_]*$")) ; Ensure enum variants are highlighted correctly regardless of naming convention -(enum_variant name: (identifier) @type) +(enum_variant + name: (identifier) @type) [ "(" @@ -81,9 +100,7 @@ "::" ] @punctuation.delimiter -[ - "#" -] @punctuation.special +"#" @punctuation.special [ "as" @@ -131,7 +148,7 @@ ] @keyword.control (for_expression - ("for" @keyword.control)) + "for" @keyword.control) [ (string_literal) @@ -154,8 +171,10 @@ ] @comment [ - (line_comment (doc_comment)) - (block_comment (doc_comment)) + (line_comment + (doc_comment)) + (block_comment + (doc_comment)) ] @comment.doc [ @@ -198,25 +217,44 @@ ] @operator ; Avoid highlighting these as operators when used in doc comments. -(unary_expression "!" @operator) +(unary_expression + "!" @operator) + operator: "/" @operator (lifetime "'" @lifetime (identifier) @lifetime) -(parameter (identifier) @variable.parameter) - -(attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) - -(inner_attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) +(parameter + (identifier) @variable.parameter) + +(attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? @attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? @none "^[a-z\\d_]*$")) + ])) + +(inner_attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? 
@attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? @none "^[a-z\\d_]*$")) + ])) diff --git a/crates/languages/src/rust/imports.scm b/crates/languages/src/rust/imports.scm index 3ce6a4f073506dd4d27320a7fd5bb547927f9c1a..2c368523d63b9c6ae9494b1ab801192161fd7000 100644 --- a/crates/languages/src/rust/imports.scm +++ b/crates/languages/src/rust/imports.scm @@ -1,27 +1,29 @@ (use_declaration) @import (scoped_use_list - path: (_) @namespace - list: (_) @list) + path: (_) @namespace + list: (_) @list) (scoped_identifier - path: (_) @namespace - name: (identifier) @name) + path: (_) @namespace + name: (identifier) @name) -(use_list (identifier) @name) +(use_list + (identifier) @name) -(use_declaration (identifier) @name) +(use_declaration + (identifier) @name) (use_as_clause - path: (scoped_identifier - path: (_) @namespace - name: (_) @name) - alias: (_) @alias) + path: (scoped_identifier + path: (_) @namespace + name: (_) @name) + alias: (_) @alias) (use_as_clause - path: (identifier) @name - alias: (_) @alias) + path: (identifier) @name + alias: (_) @alias) (use_wildcard - (_)? @namespace - "*" @wildcard) + (_)? 
@namespace + "*" @wildcard) diff --git a/crates/languages/src/rust/indents.scm b/crates/languages/src/rust/indents.scm index 9ab6b029083fd5d8e3249916c00a5f90648eb3e2..b4ef2ebcd78016de1092e718385ab52a89273003 100644 --- a/crates/languages/src/rust/indents.scm +++ b/crates/languages/src/rust/indents.scm @@ -1,14 +1,26 @@ [ - ((where_clause) _ @end) - (field_expression) - (call_expression) - (assignment_expression) - (let_declaration) - (let_chain) - (await_expression) + ((where_clause) + _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index 88df78d793c5666492b0f3917d78b4210be5e094..89d839282d3388f450f9ebdb923167f0986f349c 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -1,64 +1,67 @@ ([ - (line_comment) - (block_comment) + (line_comment) + (block_comment) ] @injection.content - (#set! injection.language "comment")) + (#set! injection.language "comment")) (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#not-any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! injection.language "rust")) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#not-any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! 
injection.language "rust")) ; we need a better way for the leptos extension to declare that ; it wants to inject inside of rust, instead of modifying the rust ; injections to support leptos injections (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! injection.language "rstml") - ) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! injection.language "rstml")) (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "sql") - (_) @injection.content - (#set! injection.language "sql") - ) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "sql") + (_) @injection.content + (#set! injection.language "sql")) ; lazy_regex (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (token_tree [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (token_tree + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) ]) - (#set! injection.language "regex") - (#any-of? @_macro_name "regex" "bytes_regex") -) + (#set! injection.language "regex") + (#any-of? @_macro_name "regex" "bytes_regex")) (call_expression - function: (scoped_identifier) @_fn_path - arguments: (arguments - [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) - ] - ) - - (#match? @_fn_path ".*Regex(Builder)?::new") - (#set! 
injection.language "regex") -) + function: (scoped_identifier) @_fn_path + arguments: (arguments + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) + ]) + (#match? @_fn_path ".*Regex(Builder)?::new") + (#set! injection.language "regex")) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm index a99f53dd2b3154aa3717f67fd683da4a8b57d31b..03ecb99facdc99cb0be8e2fb6bd4e177cb936b4e 100644 --- a/crates/languages/src/rust/outline.scm +++ b/crates/languages/src/rust/outline.scm @@ -1,73 +1,81 @@ (attribute_item) @annotation + (line_comment) @annotation (struct_item - (visibility_modifier)? @context - "struct" @context - name: (_) @name) @item + (visibility_modifier)? @context + "struct" @context + name: (_) @name) @item (enum_item - (visibility_modifier)? @context - "enum" @context - name: (_) @name) @item + (visibility_modifier)? @context + "enum" @context + name: (_) @name) @item (enum_variant - (visibility_modifier)? @context - name: (_) @name) @item + (visibility_modifier)? @context + name: (_) @name) @item (impl_item - "impl" @context - trait: (_)? @name - "for"? @context - type: (_) @name - body: (_ . "{" @open "}" @close .)) @item + "impl" @context + trait: (_)? @name + "for"? @context + type: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item (trait_item - (visibility_modifier)? @context - "trait" @context - name: (_) @name) @item + (visibility_modifier)? @context + "trait" @context + name: (_) @name) @item (function_item - (visibility_modifier)? @context - (function_modifiers)? @context - "fn" @context - name: (_) @name - body: (_ . "{" @open "}" @close .)) @item + (visibility_modifier)? @context + (function_modifiers)? @context + "fn" @context + name: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item (function_signature_item - (visibility_modifier)? @context - (function_modifiers)? 
@context - "fn" @context - name: (_) @name) @item + (visibility_modifier)? @context + (function_modifiers)? @context + "fn" @context + name: (_) @name) @item (macro_definition - . "macro_rules!" @context - name: (_) @name) @item + . + "macro_rules!" @context + name: (_) @name) @item (mod_item - (visibility_modifier)? @context - "mod" @context - name: (_) @name) @item + (visibility_modifier)? @context + "mod" @context + name: (_) @name) @item (type_item - (visibility_modifier)? @context - "type" @context - name: (_) @name) @item + (visibility_modifier)? @context + "type" @context + name: (_) @name) @item (associated_type - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (const_item - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item (static_item - (visibility_modifier)? @context - "static" @context - name: (_) @name) @item + (visibility_modifier)? @context + "static" @context + name: (_) @name) @item (field_declaration - (visibility_modifier)? @context - name: (_) @name) @item + (visibility_modifier)? 
@context + name: (_) @name) @item diff --git a/crates/languages/src/rust/overrides.scm b/crates/languages/src/rust/overrides.scm index 91fa6139d387db97676cd32a84433b16f3c8e94e..039425a91d519b2b4b030a37ad9e71705833820e 100644 --- a/crates/languages/src/rust/overrides.scm +++ b/crates/languages/src/rust/overrides.scm @@ -2,6 +2,7 @@ (string_literal) (raw_string_literal) ] @string + [ (line_comment) (block_comment) diff --git a/crates/languages/src/rust/runnables.scm b/crates/languages/src/rust/runnables.scm index 7c1571614424161ec866f5fa2607ea55975500e2..ef7050397df586ebb96c2648ea3be282d246e5aa 100644 --- a/crates/languages/src/rust/runnables.scm +++ b/crates/languages/src/rust/runnables.scm @@ -1,92 +1,75 @@ ; Rust mod test -( - (attribute_item (attribute - ( - (identifier) @_attribute) - arguments: ( - (token_tree (identifier) @_test) - (#eq? @_test "test") - ) - ) - (#eq? @_attribute "cfg") - ) - . - (mod_item - name: (_) @run - ) - (#set! tag rust-mod-test) -) +((attribute_item + (attribute + (identifier) @_attribute + arguments: ((token_tree + (identifier) @_test) + (#eq? @_test "test"))) + (#eq? @_attribute "cfg")) + . + (mod_item + name: (_) @run) + (#set! tag rust-mod-test)) ; Rust test -( - ( - (attribute_item (attribute - [((identifier) @_attribute) - (scoped_identifier (identifier) @_attribute) - ]) - (#match? @_attribute "test") - ) @_start - . - (attribute_item) * - . - [(line_comment) (block_comment)] * - . - (function_item - name: (_) @run @_test_name - body: _ - ) @_end - ) - (#set! tag rust-test) -) +(((attribute_item + (attribute + [ + (identifier) @_attribute + (scoped_identifier + (identifier) @_attribute) + ]) + (#match? @_attribute "test")) @_start + . + (attribute_item)* + . + [ + (line_comment) + (block_comment) + ]* + . + (function_item + name: (_) @run @_test_name + body: _) @_end) + (#set! tag rust-test)) ; Rust doc test -( - ( - (line_comment) * - (line_comment - doc: (_) @_comment_content - ) @_start @run - (#match? 
@_comment_content "```") - . - (line_comment) * - . - (line_comment - doc: (_) @_end_comment_content - ) @_end_code_block - (#match? @_end_comment_content "```") - . - (line_comment) * - (attribute_item) * - . - [(function_item - name: (_) @_doc_test_name - body: _ - ) (function_signature_item - name: (_) @_doc_test_name - ) (struct_item - name: (_) @_doc_test_name - ) (enum_item - name: (_) @_doc_test_name - body: _ - ) ( - (attribute_item) ? - (macro_definition - name: (_) @_doc_test_name) - ) (mod_item - name: (_) @_doc_test_name - )] @_end - ) - (#set! tag rust-doc-test) -) +(((line_comment)* + (line_comment + doc: (_) @_comment_content) @_start @run + (#match? @_comment_content "```") + . + (line_comment)* + . + (line_comment + doc: (_) @_end_comment_content) @_end_code_block + (#match? @_end_comment_content "```") + . + (line_comment)* + (attribute_item)* + . + [ + (function_item + name: (_) @_doc_test_name + body: _) + (function_signature_item + name: (_) @_doc_test_name) + (struct_item + name: (_) @_doc_test_name) + (enum_item + name: (_) @_doc_test_name + body: _) + ((attribute_item)? + (macro_definition + name: (_) @_doc_test_name)) + (mod_item + name: (_) @_doc_test_name) + ] @_end) + (#set! tag rust-doc-test)) ; Rust main function -( - ( - (function_item - name: (_) @run - body: _ - ) @_rust_main_function_end - (#eq? @run "main") - ) - (#set! tag rust-main) -) +(((function_item + name: (_) @run + body: _) @_rust_main_function_end + (#eq? @run "main")) + (#set! 
tag rust-main)) diff --git a/crates/languages/src/rust/textobjects.scm b/crates/languages/src/rust/textobjects.scm index 4e7e7fa0cd1ba4393bc99998e38e940f751aef97..97a90a54f800942eb733a9bd494b6e56e191a3ec 100644 --- a/crates/languages/src/rust/textobjects.scm +++ b/crates/languages/src/rust/textobjects.scm @@ -2,50 +2,73 @@ (function_signature_item) @function.around (function_item - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ; classes (struct_item - body: (_ - ["{" "("]? - [(_) ","?]* @class.inside - ["}" ")"]? )) @class.around + body: (_ + [ + "{" + "(" + ]? + [ + (_) + ","? + ]* @class.inside + [ + "}" + ")" + ]?)) @class.around (enum_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (trait_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (impl_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (mod_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? 
+ ]* @class.inside + "}")) @class.around ; comments - (line_comment)+ @comment.around (block_comment) @comment.around diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 72e4684ce0a0242e5381c118a9748e3d9718341d..a74275af9631eea603cc957d44867d7d53327682 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -139,6 +139,7 @@ impl LspAdapter for TailwindLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true, diff --git a/crates/languages/src/tailwindcss.rs b/crates/languages/src/tailwindcss.rs index 016c2956591a5140ab4b2d8313711382fee47d30..aa310fac3f57477b9c0ef85f24f51e619a893c87 100644 --- a/crates/languages/src/tailwindcss.rs +++ b/crates/languages/src/tailwindcss.rs @@ -135,6 +135,7 @@ impl LspAdapter for TailwindCssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/tsx/brackets.scm b/crates/languages/src/tsx/brackets.scm index 0e98b78036b4b19fd63d812fa92d2416788764f4..d72fcb26005a0021907558bbbee7471cfeaec603 100644 --- a/crates/languages/src/tsx/brackets.scm +++ b/crates/languages/src/tsx/brackets.scm @@ -1,11 +1,35 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) - -((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +("<" @open + "/>" @close) + +("" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((jsx_element + (jsx_opening_element) @open + (jsx_closing_element) @close) + (#set! newline.only) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/tsx/debugger.scm b/crates/languages/src/tsx/debugger.scm index 3e73dc839e4e5fc5ccc1654e96b327bc8181a2e8..5a6ab143d0dbed601534cc214bd017fcf5c29a41 100644 --- a/crates/languages/src/tsx/debugger.scm +++ b/crates/languages/src/tsx/debugger.scm @@ -1,25 +1,55 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(jsx_expression (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(jsx_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index a96bf96281fd90a77a3411d1ad909f22c12ac0df..056956199ef26faea0c9fc09467f48c19a712b14 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -122,9 +121,10 @@ name: (identifier) @variable.parameter) ; Special identifiers - (type_annotation) @type + (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -153,12 +153,12 @@ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -182,11 +182,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -244,7 +245,8 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -253,14 +255,13 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) ; Keywords [ @@ -334,7 +335,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -352,31 +354,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" @punctuation.special) (optional_parameter ([ @@ -384,44 +387,66 @@ ":" ]) @punctuation.special) - - (jsx_opening_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_self_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? 
@tag.jsx "^[a-z][^.]*$")) - -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + property: (property_identifier) @type) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/languages/src/tsx/imports.scm b/crates/languages/src/tsx/imports.scm index e26b97aeef9cb62395e7030f3173208d79187bd6..0e688d53fb6ed639c55c1fa84917711d19c3108a 100644 --- a/crates/languages/src/tsx/imports.scm +++ b/crates/languages/src/tsx/imports.scm @@ -1,14 +1,16 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import + !import_clause + source: (string + (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/tsx/indents.scm b/crates/languages/src/tsx/indents.scm index 9897f3060eaf37891cf4563cebc93345112422f8..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 100644 --- a/crates/languages/src/tsx/indents.scm +++ b/crates/languages/src/tsx/indents.scm @@ -1,20 +1,32 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent -(jsx_opening_element ">" @end) @indent +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent (jsx_element (jsx_opening_element) @start diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm index 2cf3ea69ca2fd95402eba6fadb85f3505c5562b7..fda53263f575238051d325cd5820a285f8f24259 100644 --- a/crates/languages/src/tsx/injections.scm +++ b/crates/languages/src/tsx/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -10,119 +9,137 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! 
injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "html")) -) + function: (identifier) @_name + (#eq? @_name "html") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "html"))) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! 
injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! 
injection.language "isograph"))) -) + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? 
@_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..37991965256a0def9b0458958ac4e50c6f337af6 100644 --- a/crates/languages/src/tsx/outline.scm +++ b/crates/languages/src/tsx/outline.scm @@ -1,230 +1,275 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (type_alias_declaration - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? @context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? 
@context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Exported array destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Exported object destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array 
destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item + "abstract" @context + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" 
@context)) @item) ; Object literal methods (variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/tsx/overrides.scm b/crates/languages/src/tsx/overrides.scm index f5a51af33fee340762d6b689e78d2e94e9c84901..759ffe8703ff27f53e5ccadb3eb4687a279f21f8 100644 --- a/crates/languages/src/tsx/overrides.scm +++ b/crates/languages/src/tsx/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element @@ -13,6 +14,7 @@ (jsx_expression) ] @default -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: (type_arguments))) diff --git a/crates/languages/src/tsx/runnables.scm b/crates/languages/src/tsx/runnables.scm index 85702cf99d9968b29f9375bfd8215ecba53f2eb5..db1f69a2c22e5a5dbcf7892f6c02158260c764e9 100644 --- a/crates/languages/src/tsx/runnables.scm +++ b/crates/languages/src/tsx/runnables.scm @@ -1,46 +1,42 @@ ; Add support for (node:test, bun:test and Jest) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! 
tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! 
tag js-test)) diff --git a/crates/languages/src/tsx/textobjects.scm b/crates/languages/src/tsx/textobjects.scm index 628a921f3ac9ea04ff59654d72caf73cebbc9071..7a3a4768d94f495f9654d7ba1c182d3f7a47dcb4 100644 --- a/crates/languages/src/tsx/textobjects.scm +++ b/crates/languages/src/tsx/textobjects.scm @@ -1,113 +1,129 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration (expression body fallback) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) + (function_signature) @function.around (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (ambient_declaration - (module + (module body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around (internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (type_alias_declaration) @class.around diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 2b2fb19c629f85c6b51eba64d154b43e716f6827..d15d01808137dd171cc7ee0ab440671bf58cac52 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -804,6 +804,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn initialization_options( self: Arc, adapter: &Arc, + _: &mut AsyncApp, ) -> Result> { let tsdk_path = self.tsdk_path(adapter).await; Ok(Some(json!({ diff --git a/crates/languages/src/typescript/brackets.scm b/crates/languages/src/typescript/brackets.scm index 635233849142d8951edeca02ca0c79253aa91e80..2f6f3a133fbe47abfcf54473beff0c73c04afaf4 100644 --- a/crates/languages/src/typescript/brackets.scm +++ b/crates/languages/src/typescript/brackets.scm @@ -1,7 +1,23 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! 
rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/typescript/debugger.scm b/crates/languages/src/typescript/debugger.scm index a99f194a4a4130210b47f8170fca039acc163411..8f384fd8ad9e07fea89972464e64b905086bf580 100644 --- a/crates/languages/src/typescript/debugger.scm +++ b/crates/languages/src/typescript/debugger.scm @@ -1,23 +1,51 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 8ec3ec26cca805c65d68d9df08037102a32494dc..55eb9119e4963957e77aa1791d2a51aadd8d2890 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -1,46 +1,19 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Special identifiers - (type_annotation) @type (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -65,49 +38,47 @@ (implements_clause (type_identifier) @type) -;; Enables ts-pretty-errors -;; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, -;; but should still be highlighted -;; Highlights object literals by hijacking the statement_block pattern, but only if -;; the statement block follows an object literal pattern -((statement_block - (labeled_statement - ;; highlight the label like a property name - label: (statement_identifier) @property.name - body: [ - ;; match a terminating expression statement - (expression_statement - ;; single identifier - treat as a type name - [(identifier) @type.name - ;; object - treat as a property - type pair - (object - (pair - key: (_) @property.name - value: (_) @type.name)) - ;; subscript_expression - treat as an array declaration - (subscript_expression - object: (_) @type.name - index: (_) - ) - ;; templated string - treat each identifier contained as a type name - (template_string - (template_substitution - (identifier) @type.name)) - ]) - ;; match a nested statement block - (statement_block) @nested - ]))) +; Enables ts-pretty-errors +; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, +; but should still be highlighted +; Highlights object literals by hijacking the statement_block pattern, but only if +; the statement block follows an object literal pattern +(statement_block + (labeled_statement + ; highlight the label like a property name + label: (statement_identifier) @property.name + body: [ + ; match a terminating expression statement + 
(expression_statement + ; single identifier - treat as a type name + [ + (identifier) @type.name + ; object - treat as a property - type pair + (object + (pair + key: (_) @property.name + value: (_) @type.name)) + ; subscript_expression - treat as an array declaration + (subscript_expression + object: (_) @type.name + index: (_)) + ; templated string - treat each identifier contained as a type name + (template_string + (template_substitution + (identifier) @type.name)) + ]) + ; match a nested statement block + (statement_block) @nested + ])) ; Inline type imports: import { type Foo } or import { type Foo as Bar } (import_specifier "type" - name: (identifier) @type -) + name: (identifier) @type) (import_specifier "type" - alias: (identifier) @type -) + alias: (identifier) @type) ; Full type imports: import type { Foo } or import type { Foo as Bar } (import_statement @@ -115,45 +86,41 @@ (import_clause (named_imports (import_specifier - name: (identifier) @type - ) - ) - ) -) + name: (identifier) @type)))) (import_statement "type" (import_clause (named_imports (import_specifier - alias: (identifier) @type - ) - ) - ) -) + alias: (identifier) @type)))) ([ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? 
@constant "^_*[A-Z_][A-Z\\d_]*$")) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -162,38 +129,60 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) (arrow_function) @function ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -227,8 +216,8 @@ name: (identifier) @variable.parameter) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -247,8 +236,7 @@ (undefined) (true) (false) - ] @type.builtin -) + ] @type.builtin) (comment) @comment @@ -263,11 +251,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -326,14 +315,14 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) [ "(" @@ -342,7 +331,7 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (template_substitution "${" @punctuation.special @@ -360,31 +349,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" @punctuation.special) (optional_parameter ([ @@ -393,7 +383,6 @@ ]) @punctuation.special) ; Keywords - [ "abstract" "as" @@ -465,4 +454,5 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) diff --git a/crates/languages/src/typescript/imports.scm b/crates/languages/src/typescript/imports.scm index 68ca25b2c15b7e312edbc3eeb9b2f0e493ca2d6f..de8f8db418157511d5756d6b5ede1a02a03bd831 100644 --- a/crates/languages/src/typescript/imports.scm +++ b/crates/languages/src/typescript/imports.scm @@ -1,20 +1,23 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - (namespace_import) @wildcard - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + (namespace_import) @wildcard + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !source - import_clause: (import_require_clause - source: (string (string_fragment) @source))) @wildcard @import + !source + import_clause: (import_require_clause + source: (string + (string_fragment) @source))) @wildcard @import (import_statement - !import_clause - source: (string (string_fragment) @source)) @wildcard @import + !import_clause + source: (string + (string_fragment) @source)) @wildcard @import diff --git a/crates/languages/src/typescript/indents.scm b/crates/languages/src/typescript/indents.scm index b4ac50bf5ac0bf1871523cabc9ee3683a28cd0f3..2715d2567194f00a9566e9b0c385ae8aa6258df0 100644 --- a/crates/languages/src/typescript/indents.scm +++ b/crates/languages/src/typescript/indents.scm @@ -1,17 +1,28 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - ; below handled by `(_ "{" "}" @end) @indent` - ; (if_statement) - ; (for_statement) - ; (while_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + ; below handled by `(_ "{" "}" @end) @indent` + ; (if_statement) + ; (for_statement) + ; (while_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm index 91880407900e7407e46982a54dbeaa3e30277bdd..a8cf9a41b5f90a6b9d02358b1b6073286fbe86ac 100644 --- a/crates/languages/src/typescript/injections.scm +++ b/crates/languages/src/typescript/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! 
injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -14,156 +13,187 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") + function: (identifier) @_name + (#eq? @_name "html") arguments: (template_string) @injection.content - (#set! injection.language "html") -) + (#set! injection.language "html")) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? 
@_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? 
@_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "isograph"))) -) - -;; Angular Component template injection + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) + +; Angular Component template injection (call_expression function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? @_decorator "Component")) ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "template") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - ] - ))) + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? @_prop "template") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + ]))) (#set! injection.language "angular")) -;; Angular Component styles injection +; Angular Component styles injection (call_expression function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? 
@_decorator "Component")) ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "styles") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - (array (string) @injection.content) - (array (template_string) @injection.content) - (array (template_string (string_fragment)) @injection.content) - ] - ))) + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? @_prop "styles") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + (array + (string) @injection.content) + (array + (template_string) @injection.content) + (array + (template_string + (string_fragment)) @injection.content) + ]))) (#set! injection.language "css")) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! 
injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..37991965256a0def9b0458958ac4e50c6f337af6 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -1,230 +1,275 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (type_alias_declaration - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? 
@context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Exported array destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Exported object destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) 
@name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item + "abstract" @context + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + 
"readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) ; Object literal methods (variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/typescript/overrides.scm b/crates/languages/src/typescript/overrides.scm index 8f437a1424af06aa4855aac67511926181977936..f5e99cad68a91695d9d0b19b308e3ce19f75555a 100644 --- a/crates/languages/src/typescript/overrides.scm +++ b/crates/languages/src/typescript/overrides.scm @@ -2,8 +2,10 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: (type_arguments))) diff --git a/crates/languages/src/typescript/runnables.scm b/crates/languages/src/typescript/runnables.scm index 6bfc53632910ce8212f739d310e3d560d05cffc1..38fee610e85f2aa2f5f7f7c58caf79b3c6a3d1ed 100644 --- a/crates/languages/src/typescript/runnables.scm +++ b/crates/languages/src/typescript/runnables.scm @@ -1,85 +1,71 @@ ; Add support for (node:test, bun:test, Jest and Deno.test) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! 
tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for Deno.test with string names -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . [ - (string (string_fragment) @run @DENO_TEST_NAME) - (identifier) @run @DENO_TEST_NAME - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . 
+ [ + (string + (string_fragment) @run @DENO_TEST_NAME) + (identifier) @run @DENO_TEST_NAME + ])) @_js-test + (#set! tag js-test)) ; Add support for Deno.test with named function expressions -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . (function_expression - name: (identifier) @run @DENO_TEST_NAME - ) - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . + (function_expression + name: (identifier) @run @DENO_TEST_NAME))) @_js-test + (#set! tag js-test)) diff --git a/crates/languages/src/typescript/textobjects.scm b/crates/languages/src/typescript/textobjects.scm index 96289f058cd7b605a8f5b4c8966e3c372022d065..384ea482352dfb1f617357bd3af719a64425d876 100644 --- a/crates/languages/src/typescript/textobjects.scm +++ b/crates/languages/src/typescript/textobjects.scm @@ -1,114 +1,130 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration - capture body as @function.inside ; (for statement blocks, the more specific pattern above captures just the contents) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? 
@function.around variable_declarator)) + (function_signature) @function.around (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (ambient_declaration - (module + (module body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around (internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (type_alias_declaration) @class.around diff --git a/crates/languages/src/yaml/brackets.scm b/crates/languages/src/yaml/brackets.scm index 0cfc5072d4eeda19d75ce943481670a3ee8938b0..edeb53a0d313846089e716bedff4256e2b47d94e 100644 --- a/crates/languages/src/yaml/brackets.scm +++ b/crates/languages/src/yaml/brackets.scm @@ -1,4 +1,13 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/yaml/highlights.scm b/crates/languages/src/yaml/highlights.scm index dfecf3f9d421cf1a574ce03dccfeb1201d8086a9..1d9c97c17a7925e5e9d87ed8e3bfba51c9b11d8b 100644 --- a/crates/languages/src/yaml/highlights.scm +++ b/crates/languages/src/yaml/highlights.scm @@ -1,4 +1,5 @@ (boolean_scalar) @boolean + (null_scalar) @constant.builtin [ @@ -25,30 +26,31 @@ key: (flow_node [ - (plain_scalar (string_scalar)) + (plain_scalar + (string_scalar)) (double_quote_scalar) (single_quote_scalar) ] @property) [ - "," - "-" - ":" - ">" - "?" - "|" + "," + "-" + ":" + ">" + "?" + "|" ] @punctuation.delimiter [ - "[" - "]" - "{" - "}" + "[" + "]" + "{" + "}" ] @punctuation.bracket [ - "*" - "&" - "---" - "..." + "*" + "&" + "---" + "..." ] @punctuation.special diff --git a/crates/languages/src/yaml/injections.scm b/crates/languages/src/yaml/injections.scm index c9de25a18f8afb7d8e0c6874401798edede9bce1..2b94b7f0cdd9d18c3c7157d9bd4adfd1b59ea061 100644 --- a/crates/languages/src/yaml/injections.scm +++ b/crates/languages/src/yaml/injections.scm @@ -1,25 +1,26 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ; GitHub actions: JavaScript for workflow scripting (inline and block) (block_mapping (block_mapping_pair - key: (flow_node) @_uses (#eq? @_uses "uses") - value: (flow_node) @_actions_ghs (#match? @_actions_ghs "^actions/github-script")) + key: (flow_node) @_uses + (#eq? @_uses "uses") + value: (flow_node) @_actions_ghs + (#match? @_actions_ghs "^actions/github-script")) (block_mapping_pair - key: (flow_node) @_with (#eq? @_with "with") + key: (flow_node) @_with + (#eq? @_with "with") value: (block_node (block_mapping (block_mapping_pair - key: (flow_node) @_run (#eq? @_run "script") + key: (flow_node) @_run + (#eq? 
@_run "script") value: [ - (flow_node (plain_scalar (string_scalar) @injection.content)) - (block_node (block_scalar) @injection.content) + (flow_node + (plain_scalar + (string_scalar) @injection.content)) + (block_node + (block_scalar) @injection.content) ] - (#set! injection.language "javascript") - ) - ) - ) - ) -) + (#set! injection.language "javascript")))))) diff --git a/crates/languages/src/yaml/outline.scm b/crates/languages/src/yaml/outline.scm index c5a7f8e5d40388c020ec9dab83d6cee02746b581..a41447bf64cceadd1ae3d59bd2804e85bd5e8c39 100644 --- a/crates/languages/src/yaml/outline.scm +++ b/crates/languages/src/yaml/outline.scm @@ -1,9 +1,7 @@ (block_mapping_pair - key: - (flow_node - (plain_scalar - (string_scalar) @name)) - value: - (flow_node - (plain_scalar - (string_scalar) @context))?) @item + key: (flow_node + (plain_scalar + (string_scalar) @name)) + value: (flow_node + (plain_scalar + (string_scalar) @context))?) @item diff --git a/crates/languages/src/yaml/overrides.scm b/crates/languages/src/yaml/overrides.scm index 9503051a62080eb2fdfca3416ef9e5286464dd17..99c991e7d445137dc335275138a8fd68cea31d17 100644 --- a/crates/languages/src/yaml/overrides.scm +++ b/crates/languages/src/yaml/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (single_quote_scalar) (double_quote_scalar) diff --git a/crates/languages/src/yaml/redactions.scm b/crates/languages/src/yaml/redactions.scm index 85fdbd26ea0fc0b3956652ef48c61a44613337e4..56c7415e70f183afe63950511479e74512ac97f8 100644 --- a/crates/languages/src/yaml/redactions.scm +++ b/crates/languages/src/yaml/redactions.scm @@ -1 +1,2 @@ -(block_mapping_pair value: (flow_node) @redact) +(block_mapping_pair + value: (flow_node) @redact) diff --git a/crates/languages/src/zed-keybind-context/brackets.scm b/crates/languages/src/zed-keybind-context/brackets.scm index d086b2e98df0837208a13f6c6f79db84c204fb99..24c20234b639f2afe7754b1d6dceb5685ac7b8e7 100644 --- 
a/crates/languages/src/zed-keybind-context/brackets.scm +++ b/crates/languages/src/zed-keybind-context/brackets.scm @@ -1 +1,2 @@ -("(" @open ")" @close) +("(" @open + ")" @close) diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index e4c530bbcb3864cf2557f15ef02ddbe7e81852c7..66511da9daa943628e71000a2009b2026eeace6c 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -40,15 +40,12 @@ serde.workspace = true serde_urlencoded.workspace = true settings.workspace = true smallvec.workspace = true -tokio-tungstenite.workspace = true ui.workspace = true util.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] -libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } -livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [ - "__rustls-tls" -] } +libwebrtc.workspace = true +livekit.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies] scap.workspace = true diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 6fc1d3415a493e7e1989472616015916a82cf818..1db9a12ef2b7f3b4f3de1cba6c61a30db12a5bd9 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use anyhow::{Context as _, Result, anyhow}; use audio::AudioSettings; use collections::HashMap; @@ -54,10 +52,8 @@ impl Room { token: String, cx: &mut AsyncApp, ) -> Result<(Self, mpsc::UnboundedReceiver)> { - let connector = - tokio_tungstenite::Connector::Rustls(Arc::new(http_client_tls::tls_config())); let mut config = livekit::RoomOptions::default(); - config.connector = Some(connector); + config.tls_config = 
livekit::TlsConfig(Some(http_client_tls::tls_config())); let (room, mut events) = Tokio::spawn(cx, async move { livekit::Room::connect(&url, &token, config).await }) diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index 6e39c2abfb4162ceaa43373f4170a41ffdb36351..df62479f022be5295a3de44f40fabf48aed515f2 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -466,10 +466,13 @@ pub(crate) async fn capture_local_video_track( ) -> Result<(crate::LocalVideoTrack, Box)> { let metadata = capture_source.metadata()?; let track_source = gpui_tokio::Tokio::spawn(cx, async move { - NativeVideoSource::new(VideoResolution { - width: metadata.resolution.width.0 as u32, - height: metadata.resolution.height.0 as u32, - }) + NativeVideoSource::new( + VideoResolution { + width: metadata.resolution.width.0 as u32, + height: metadata.resolution.height.0 as u32, + }, + true, + ) }) .await?; diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 9533ddb600b18213de4d6e50599c62aa182b9b8a..2c48575a648a9eba12b16ce8edb2cf959d7cc8b3 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,12 +13,13 @@ path = "src/lsp.rs" doctest = false [features] -test-support = ["async-pipe"] +test-support = ["async-pipe", "gpui_util"] [dependencies] anyhow.workspace = true async-pipe = { workspace = true, optional = true } collections.workspace = true +gpui_util = { workspace = true, optional = true } futures.workspace = true gpui.workspace = true log.workspace = true @@ -34,6 +35,7 @@ release_channel.workspace = true [dev-dependencies] async-pipe.workspace = true +gpui_util.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } semver.workspace = true diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index e552c21d701cefa8aa1f4b6e14e826892e3b25b6..2e2318065292ffdc2ac39b577afc7a264d36473d 
100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1970,10 +1970,14 @@ impl FakeLanguageServer { let responded_tx = responded_tx.clone(); let executor = cx.background_executor().clone(); async move { + let _guard = gpui_util::defer({ + let responded_tx = responded_tx.clone(); + move || { + responded_tx.unbounded_send(()).ok(); + } + }); executor.simulate_random_delay().await; - let result = result.await; - responded_tx.unbounded_send(()).ok(); - result + result.await } }) .detach(); diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 1cd19ffb8f7cfa16ab1aa95af9425690aba78707..087b7153704c215ec27eae653879ffe9f11ebf09 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -15,6 +15,7 @@ use ui::Checkbox; use ui::CopyButton; use std::borrow::Cow; +use std::collections::BTreeMap; use std::iter; use std::mem; use std::ops::Range; @@ -246,7 +247,7 @@ pub struct Markdown { fallback_code_block_language: Option, options: Options, copied_code_blocks: HashSet, - code_block_scroll_handles: HashMap, + code_block_scroll_handles: BTreeMap, context_menu_selected_text: Option, } @@ -316,7 +317,7 @@ impl Markdown { parse_links_only: false, }, copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), + code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, }; this.parse(cx); @@ -341,7 +342,7 @@ impl Markdown { parse_links_only: true, }, copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), + code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, }; this.parse(cx); @@ -364,6 +365,32 @@ impl Markdown { self.code_block_scroll_handles.clear(); } + fn autoscroll_code_block(&self, source_index: usize, cursor_position: Point) { + let Some((_, scroll_handle)) = self + .code_block_scroll_handles + .range(..=source_index) + .next_back() + else { + return; + }; + + let bounds = 
scroll_handle.bounds(); + if cursor_position.y < bounds.top() || cursor_position.y > bounds.bottom() { + return; + } + + let horizontal_delta = if cursor_position.x < bounds.left() { + bounds.left() - cursor_position.x + } else if cursor_position.x > bounds.right() { + bounds.right() - cursor_position.x + } else { + return; + }; + + let offset = scroll_handle.offset(); + scroll_handle.set_offset(point(offset.x + horizontal_delta, offset.y)); + } + pub fn is_parsing(&self) -> bool { self.pending_parse.is_some() } @@ -902,6 +929,7 @@ impl MarkdownElement { Ok(ix) | Err(ix) => ix, }; markdown.selection.set_head(source_index, &rendered_text); + markdown.autoscroll_code_block(source_index, event.position); markdown.autoscroll_request = Some(source_index); cx.notify(); } else { diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 21738147eed1b5b02da1c85207736160bd37ceb3..f530b88908380be13de2005bb8b3ec2b7e6e31b5 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -10,7 +10,7 @@ use collections::HashSet; use crate::path_range::PathWithRange; -const PARSE_OPTIONS: Options = Options::ENABLE_TABLES +pub const PARSE_OPTIONS: Options = Options::ENABLE_TABLES .union(Options::ENABLE_FOOTNOTES) .union(Options::ENABLE_STRIKETHROUGH) .union(Options::ENABLE_TASKLISTS) diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 55912c66a017fa22902f9b05e5fa924230710d69..4baa308f1088341aada1eb2917c2133b8df8c143 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -25,6 +25,7 @@ html5ever.workspace = true language.workspace = true linkify.workspace = true log.workspace = true +markdown.workspace = true markup5ever_rcdom.workspace = true pretty_assertions.workspace = true pulldown-cmark.workspace = true diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 
59f18647d3ca8ac4937b2e411c8b9bb8e33550b7..ffd697d0e1bafc2feeccf3a3a7836a224d983860 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -7,8 +7,9 @@ use collections::FxHashMap; use gpui::{DefiniteLength, FontWeight, px, relative}; use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink}; use language::LanguageRegistry; +use markdown::parser::PARSE_OPTIONS; use markup5ever_rcdom::RcDom; -use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd}; +use pulldown_cmark::{Alignment, Event, Parser, Tag, TagEnd}; use std::{ cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec, }; @@ -19,10 +20,7 @@ pub async fn parse_markdown( file_location_directory: Option, language_registry: Option>, ) -> ParsedMarkdown { - let mut options = Options::all(); - options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); - - let parser = Parser::new_ext(markdown_input, options); + let parser = Parser::new_ext(markdown_input, PARSE_OPTIONS); let parser = MarkdownParser::new( parser.into_offset_iter().collect(), file_location_directory, @@ -3076,6 +3074,26 @@ More text ); } + #[gpui::test] + async fn test_dollar_signs_are_plain_text() { + // Dollar signs should be preserved as plain text, not treated as math delimiters. 
+ // Regression test for https://github.com/zed-industries/zed/issues/50170 + let parsed = parse("$100$ per unit").await; + assert_eq!(parsed.children, vec![p("$100$ per unit", 0..14)]); + } + + #[gpui::test] + async fn test_dollar_signs_in_list_items() { + let parsed = parse("- $18,000 budget\n- $20,000 budget\n").await; + assert_eq!( + parsed.children, + vec![ + list_item(0..16, 1, Unordered, vec![p("$18,000 budget", 2..16)]), + list_item(17..33, 1, Unordered, vec![p("$20,000 budget", 19..33)]), + ] + ); + } + #[gpui::test] async fn test_code_block() { let parsed = parse( diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 82afed3c461f5d8875b6ac03c4bfac27c0cd7716..ec33b6a53b3c598842aa29b6e2c31c08c7b11558 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -275,6 +275,12 @@ pub(crate) mod m_2025_12_15 { pub(crate) use settings::SETTINGS_PATTERNS; } +pub(crate) mod m_2025_01_27 { + mod settings; + + pub(crate) use settings::make_auto_indent_an_enum; +} + pub(crate) mod m_2026_02_02 { mod settings; @@ -292,3 +298,9 @@ pub(crate) mod m_2026_02_04 { pub(crate) use settings::migrate_tool_permission_defaults; } + +pub(crate) mod m_2026_02_25 { + mod settings; + + pub(crate) use settings::migrate_builtin_agent_servers_to_registry; +} diff --git a/crates/migrator/src/migrations/m_2025_01_27/settings.rs b/crates/migrator/src/migrations/m_2025_01_27/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..e8df2aa8aabed4daaae3e45e97532c1ce3557dfe --- /dev/null +++ b/crates/migrator/src/migrations/m_2025_01_27/settings.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_language_setting; + +pub fn make_auto_indent_an_enum(value: &mut Value) -> Result<()> { + migrate_language_setting(value, migrate_auto_indent) +} + +fn migrate_auto_indent(value: &mut Value, _path: &[&str]) -> Result<()> { + let Some(auto_indent) = value + 
.as_object_mut() + .and_then(|obj| obj.get_mut("auto_indent")) + else { + return Ok(()); + }; + + *auto_indent = match auto_indent { + Value::Bool(true) => Value::String("syntax_aware".to_string()), + Value::Bool(false) => Value::String("none".to_string()), + Value::String(s) if s == "syntax_aware" || s == "preserve_indent" || s == "none" => { + return Ok(()); + } + _ => anyhow::bail!("Expected auto_indent to be a boolean or valid enum value"), + }; + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2026_02_25/settings.rs b/crates/migrator/src/migrations/m_2026_02_25/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..56b85efa0047cd17aef5f20da617430c2f4adcb3 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_02_25/settings.rs @@ -0,0 +1,161 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_settings; + +const AGENT_SERVERS_KEY: &str = "agent_servers"; + +struct BuiltinMapping { + old_key: &'static str, + registry_key: &'static str, +} + +const BUILTIN_MAPPINGS: &[BuiltinMapping] = &[ + BuiltinMapping { + old_key: "gemini", + registry_key: "gemini", + }, + BuiltinMapping { + old_key: "claude", + registry_key: "claude-acp", + }, + BuiltinMapping { + old_key: "codex", + registry_key: "codex-acp", + }, +]; + +const REGISTRY_COMPATIBLE_FIELDS: &[&str] = &[ + "env", + "default_mode", + "default_model", + "favorite_models", + "default_config_options", + "favorite_config_option_values", +]; + +pub fn migrate_builtin_agent_servers_to_registry(value: &mut Value) -> Result<()> { + migrate_settings(value, &mut migrate_one) +} + +fn migrate_one(obj: &mut serde_json::Map) -> Result<()> { + let Some(agent_servers) = obj.get_mut(AGENT_SERVERS_KEY) else { + return Ok(()); + }; + let Some(servers_map) = agent_servers.as_object_mut() else { + return Ok(()); + }; + + for mapping in BUILTIN_MAPPINGS { + migrate_builtin_entry(servers_map, mapping); + } + + Ok(()) +} + +fn migrate_builtin_entry( + servers_map: &mut 
serde_json::Map, + mapping: &BuiltinMapping, +) { + // Check if the old key exists and needs migration before taking ownership. + let needs_migration = servers_map + .get(mapping.old_key) + .and_then(|v| v.as_object()) + .is_some_and(|obj| !obj.contains_key("type")); + + if !needs_migration { + return; + } + + // When the registry key differs from the old key and the target already + // exists, just remove the stale old entry to avoid overwriting user data. + if mapping.old_key != mapping.registry_key && servers_map.contains_key(mapping.registry_key) { + servers_map.remove(mapping.old_key); + return; + } + + let Some(old_entry) = servers_map.remove(mapping.old_key) else { + return; + }; + let Some(old_obj) = old_entry.as_object() else { + return; + }; + + let has_command = old_obj.contains_key("command"); + let ignore_system_version = old_obj + .get("ignore_system_version") + .and_then(|v| v.as_bool()); + + // A custom entry is needed when the user configured a custom binary + // or explicitly opted into using the system version via + // `ignore_system_version: false` (only meaningful for gemini). + let needs_custom = has_command + || (mapping.old_key == "gemini" && matches!(ignore_system_version, Some(false))); + + if needs_custom { + let local_key = format!("{}-custom", mapping.registry_key); + + // Don't overwrite an existing `-custom` entry. 
+ if servers_map.contains_key(&local_key) { + return; + } + + let mut custom_obj = serde_json::Map::new(); + custom_obj.insert("type".to_string(), Value::String("custom".to_string())); + + if has_command { + if let Some(command) = old_obj.get("command") { + custom_obj.insert("command".to_string(), command.clone()); + } + if let Some(args) = old_obj.get("args") { + if !args.as_array().is_some_and(|a| a.is_empty()) { + custom_obj.insert("args".to_string(), args.clone()); + } + } + } else { + // ignore_system_version: false — the user wants the binary from $PATH + custom_obj.insert( + "command".to_string(), + Value::String(mapping.old_key.to_string()), + ); + } + + // Carry over all compatible fields to the custom entry. + for &field in REGISTRY_COMPATIBLE_FIELDS { + if let Some(value) = old_obj.get(field) { + match value { + Value::Array(arr) if arr.is_empty() => continue, + Value::Object(map) if map.is_empty() => continue, + Value::Null => continue, + _ => { + custom_obj.insert(field.to_string(), value.clone()); + } + } + } + } + + servers_map.insert(local_key, Value::Object(custom_obj)); + } else { + // Build a registry entry with compatible fields only. 
+ let mut registry_obj = serde_json::Map::new(); + registry_obj.insert("type".to_string(), Value::String("registry".to_string())); + + for &field in REGISTRY_COMPATIBLE_FIELDS { + if let Some(value) = old_obj.get(field) { + match value { + Value::Array(arr) if arr.is_empty() => continue, + Value::Object(map) if map.is_empty() => continue, + Value::Null => continue, + _ => { + registry_obj.insert(field.to_string(), value.clone()); + } + } + } + } + + servers_map.insert( + mapping.registry_key.to_string(), + Value::Object(registry_obj), + ); + } +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 5372fc126cc7ecd88db0eb751354bdc81e67495a..f208faf163aaf425127791f781d4569a737870ff 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -232,11 +232,13 @@ pub fn migrate_settings(text: &str) -> Result> { migrations::m_2025_12_15::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_12_15, ), + MigrationType::Json(migrations::m_2025_01_27::make_auto_indent_an_enum), MigrationType::Json( migrations::m_2026_02_02::move_edit_prediction_provider_to_edit_predictions, ), MigrationType::Json(migrations::m_2026_02_03::migrate_experimental_sweep_mercury), MigrationType::Json(migrations::m_2026_02_04::migrate_tool_permission_defaults), + MigrationType::Json(migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry), ]; run_migrations(text, migrations) } @@ -2605,6 +2607,91 @@ mod tests { ); } + #[test] + fn test_make_auto_indent_an_enum() { + // Empty settings should not change + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + + // true should become "syntax_aware" + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": true + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "syntax_aware" + }"# + 
.unindent(), + ), + ); + + // false should become "none" + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": false + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "none" + }"# + .unindent(), + ), + ); + + // Already valid enum values should not change + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": "preserve_indent" + }"# + .unindent(), + None, + ); + + // Should also work inside languages + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2025_01_27::make_auto_indent_an_enum, + )], + &r#"{ + "auto_indent": true, + "languages": { + "Python": { + "auto_indent": false + } + } + }"# + .unindent(), + Some( + &r#"{ + "auto_indent": "syntax_aware", + "languages": { + "Python": { + "auto_indent": "none" + } + } + }"# + .unindent(), + ), + ); + } + #[test] fn test_move_edit_prediction_provider_to_edit_predictions() { assert_migrate_settings_with_migrations( @@ -3820,4 +3907,415 @@ mod tests { ), ); } + + #[test] + fn test_migrate_builtin_agent_servers_to_registry_simple() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "default_model": "gemini-2.0-flash" + }, + "claude": { + "default_mode": "plan" + }, + "codex": { + "default_model": "o4-mini" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "codex-acp": { + "type": "registry", + "default_model": "o4-mini" + }, + "claude-acp": { + "type": "registry", + "default_mode": "plan" + }, + "gemini": { + "type": "registry", + "default_model": "gemini-2.0-flash" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_empty_entries() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + 
migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": {}, + "claude": {}, + "codex": {} + } +}"#, + Some( + r#"{ + "agent_servers": { + "codex-acp": { + "type": "registry" + }, + "claude-acp": { + "type": "registry" + }, + "gemini": { + "type": "registry" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_with_command() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "claude": { + "command": "/usr/local/bin/claude", + "args": ["--verbose"], + "env": {"CLAUDE_KEY": "abc123"}, + "default_mode": "plan", + "default_model": "claude-sonnet-4" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "claude-acp-custom": { + "type": "custom", + "command": "/usr/local/bin/claude", + "args": [ + "--verbose" + ], + "env": { + "CLAUDE_KEY": "abc123" + }, + "default_mode": "plan", + "default_model": "claude-sonnet-4" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_gemini_with_command() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "command": "/opt/gemini/bin/gemini", + "default_model": "gemini-2.0-flash" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "gemini-custom": { + "type": "custom", + "command": "/opt/gemini/bin/gemini", + "default_model": "gemini-2.0-flash" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_false() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "ignore_system_version": false, + "default_model": "gemini-2.0-flash" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + 
"gemini-custom": { + "type": "custom", + "command": "gemini", + "default_model": "gemini-2.0-flash" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_gemini_ignore_system_version_true() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "ignore_system_version": true, + "default_model": "gemini-2.0-flash" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "gemini": { + "type": "registry", + "default_model": "gemini-2.0-flash" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_already_typed_unchanged() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "type": "registry", + "default_model": "gemini-2.0-flash" + }, + "claude-acp": { + "type": "registry", + "default_mode": "plan" + } + } +}"#, + None, + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_preserves_custom_entries() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "claude": { + "default_mode": "plan" + }, + "my-custom-agent": { + "type": "custom", + "command": "/path/to/agent" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "claude-acp": { + "type": "registry", + "default_mode": "plan" + }, + "my-custom-agent": { + "type": "custom", + "command": "/path/to/agent" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_target_already_exists() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "claude": { + "default_mode": "plan" + }, + "claude-acp": { + "type": "registry", + 
"default_model": "claude-sonnet-4" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "claude-acp": { + "type": "registry", + "default_model": "claude-sonnet-4" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_no_agent_servers_key() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent": { + "enabled": true + } +}"#, + None, + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_all_fields() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "codex": { + "env": {"OPENAI_API_KEY": "sk-123"}, + "default_mode": "read-only", + "default_model": "o4-mini", + "favorite_models": ["o4-mini", "codex-mini-latest"], + "default_config_options": {"approval_mode": "auto-edit"}, + "favorite_config_option_values": {"approval_mode": ["auto-edit", "suggest"]} + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "codex-acp": { + "type": "registry", + "env": { + "OPENAI_API_KEY": "sk-123" + }, + "default_mode": "read-only", + "default_model": "o4-mini", + "favorite_models": [ + "o4-mini", + "codex-mini-latest" + ], + "default_config_options": { + "approval_mode": "auto-edit" + }, + "favorite_config_option_values": { + "approval_mode": [ + "auto-edit", + "suggest" + ] + } + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_codex_with_command() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "codex": { + "command": "/usr/local/bin/codex", + "args": ["--full-auto"], + "default_model": "o4-mini" + } + } +}"#, + Some( + r#"{ + "agent_servers": { + "codex-acp-custom": { + "type": "custom", + "command": "/usr/local/bin/codex", + "args": [ + "--full-auto" + ], + 
"default_model": "o4-mini" + } + } +}"#, + ), + ); + } + + #[test] + fn test_migrate_builtin_agent_servers_mixed_migrated_and_not() { + assert_migrate_settings_with_migrations( + &[MigrationType::Json( + migrations::m_2026_02_25::migrate_builtin_agent_servers_to_registry, + )], + r#"{ + "agent_servers": { + "gemini": { + "type": "registry", + "default_model": "gemini-2.0-flash" + }, + "claude": { + "default_mode": "plan" + }, + "codex": {} + } +}"#, + Some( + r#"{ + "agent_servers": { + "codex-acp": { + "type": "registry" + }, + "claude-acp": { + "type": "registry", + "default_mode": "plan" + }, + "gemini": { + "type": "registry", + "default_model": "gemini-2.0-flash" + } + } +}"#, + ), + ); + } } diff --git a/crates/miniprofiler_ui/src/miniprofiler_ui.rs b/crates/miniprofiler_ui/src/miniprofiler_ui.rs index 1f95dc3d230e7c50b4960560a96c9007fd77aab8..9ae0a33471d31f32852b4b376bbc71ff0911c60b 100644 --- a/crates/miniprofiler_ui/src/miniprofiler_ui.rs +++ b/crates/miniprofiler_ui/src/miniprofiler_ui.rs @@ -464,7 +464,7 @@ impl Render for ProfilerWindow { let scroll_offset = self.scroll_handle.offset(); let max_offset = self.scroll_handle.max_offset(); - self.autoscroll = -scroll_offset.y >= (max_offset.height - px(24.)); + self.autoscroll = -scroll_offset.y >= (max_offset.y - px(24.)); if self.autoscroll { self.scroll_handle.scroll_to_bottom(); } @@ -544,7 +544,7 @@ impl Render for ProfilerWindow { let path = cx.prompt_for_new_path( &active_path, - Some("performance_profile.miniprof"), + Some("performance_profile.miniprof.json"), ); cx.background_spawn(async move { diff --git a/crates/mistral/src/mistral.rs b/crates/mistral/src/mistral.rs index cc9f94304d989c69c3f5a4bd3763704314564a19..0244f904468a5eb3e03b520a2687b31a1168f52b 100644 --- a/crates/mistral/src/mistral.rs +++ b/crates/mistral/src/mistral.rs @@ -233,6 +233,8 @@ pub struct Request { pub messages: Vec, pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_options: Option, + 
#[serde(default, skip_serializing_if = "Option::is_none")] pub max_tokens: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub temperature: Option, @@ -246,6 +248,12 @@ pub struct Request { pub tools: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct StreamOptions { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub stream_tool_calls: Option, +} + #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum ResponseFormat { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 34d32f481947657327cbec99e0a3aedc59aeabe7..c991fd9a5cbfe451b3f86ff016f8467395373564 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -105,6 +105,8 @@ pub enum Event { }, ExcerptsRemoved { ids: Vec, + /// Contains only buffer IDs for which all excerpts have been removed. + /// Buffers that still have remaining excerpts are never included. removed_buffer_ids: Vec, }, ExcerptsExpanded { @@ -624,7 +626,7 @@ pub struct MultiBufferSnapshot { diffs: TreeMap, diff_transforms: SumTree, excerpt_ids: SumTree, - replaced_excerpts: TreeMap, + replaced_excerpts: Arc>, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, @@ -1160,12 +1162,11 @@ impl MultiBuffer { }, ); this.singleton = true; - let buffer_id = buffer.read(cx).remote_id(); - this.push_excerpts( - buffer, - [ExcerptRange::new(text::Anchor::min_max_range_for_buffer( - buffer_id, - ))], + this.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [Point::zero()..buffer.read(cx).max_point()], + 0, cx, ); this @@ -1734,18 +1735,6 @@ impl MultiBuffer { } } - pub fn push_excerpts( - &mut self, - buffer: Entity, - ranges: impl IntoIterator>, - cx: &mut Context, - ) -> Vec - where - O: text::ToOffset, - { - self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx) - } - #[instrument(skip_all)] fn merge_excerpt_ranges<'a>( expanded_ranges: impl 
IntoIterator> + 'a, @@ -1967,7 +1956,10 @@ impl MultiBuffer { *has_deleted_file = false; *has_conflict = false; *has_inverted_diff = false; - replaced_excerpts.clear(); + match Arc::get_mut(replaced_excerpts) { + Some(replaced_excerpts) => replaced_excerpts.clear(), + None => *replaced_excerpts = Default::default(), + } let edits = Self::sync_diff_transforms( self.snapshot.get_mut(), @@ -3746,11 +3738,21 @@ impl MultiBuffer { cx: &mut gpui::App, ) -> Entity { let multi = cx.new(|_| Self::new(Capability::ReadWrite)); - for (text, ranges) in excerpts { + for (ix, (text, ranges)) in excerpts.into_iter().enumerate() { let buffer = cx.new(|cx| Buffer::local(text, cx)); - let excerpt_ranges = ranges.into_iter().map(ExcerptRange::new); + let snapshot = buffer.read(cx).snapshot(); + let excerpt_ranges = ranges + .into_iter() + .map(ExcerptRange::new) + .collect::>(); multi.update(cx, |multi, cx| { - multi.push_excerpts(buffer, excerpt_ranges, cx) + multi.set_excerpt_ranges_for_path( + PathKey::sorted(ix as u64), + buffer, + &snapshot, + excerpt_ranges, + cx, + ) }); } @@ -3884,7 +3886,8 @@ impl MultiBuffer { .collect::>() ); - let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx); + let excerpt_id = + self.insert_excerpts_after(ExcerptId::max(), buffer_handle, ranges, cx); log::info!("Inserted with ids: {:?}", excerpt_id); } else { let remove_count = rng.random_range(1..=excerpt_ids.len()); @@ -6938,18 +6941,23 @@ impl MultiBufferSnapshot { } fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator { + self.try_excerpt_locator_for_id(id) + .unwrap_or_else(|| panic!("invalid excerpt id {id:?}")) + } + + fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> { if id == ExcerptId::min() { - Locator::min_ref() + Some(Locator::min_ref()) } else if id == ExcerptId::max() { - Locator::max_ref() + Some(Locator::max_ref()) } else { let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); if let Some(entry) = item && entry.id == id { - 
return &entry.locator; + return Some(&entry.locator); } - panic!("invalid excerpt id {id:?}") + None } } @@ -7034,7 +7042,7 @@ impl MultiBufferSnapshot { /// afterwards. fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { let excerpt_id = self.latest_excerpt_id(excerpt_id); - let locator = self.excerpt_locator_for_id(excerpt_id); + let locator = self.try_excerpt_locator_for_id(excerpt_id)?; let (_, _, item) = self.excerpts .find::, _>((), &Some(locator), Bias::Left); diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 5e028e60f13034e73c0c2cb6ae05c6bf56911c87..7e27786a76a14783f54e42c73850a888e87a3ac7 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -105,8 +105,8 @@ fn test_remote(cx: &mut App) { #[gpui::test] fn test_excerpt_boundaries_and_clipping(cx: &mut App) { - let buffer_1 = cx.new(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); - let buffer_2 = cx.new(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); + let buffer_1 = cx.new(|cx| Buffer::local(sample_text(7, 6, 'a'), cx)); + let buffer_2 = cx.new(|cx| Buffer::local(sample_text(7, 6, 'g'), cx)); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let events = Arc::new(RwLock::new(Vec::::new())); @@ -122,9 +122,11 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { let subscription = multibuffer.update(cx, |multibuffer, cx| { let subscription = multibuffer.subscribe(); - multibuffer.push_excerpts( + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5))], + &buffer_1.read(cx).snapshot(), + vec![ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5))], cx, ); assert_eq!( @@ -135,14 +137,21 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { }] ); - multibuffer.push_excerpts( + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(0), buffer_1.clone(), - 
[ExcerptRange::new(Point::new(3, 3)..Point::new(4, 4))], + &buffer_1.read(cx).snapshot(), + vec![ + ExcerptRange::new(Point::new(1, 2)..Point::new(2, 5)), + ExcerptRange::new(Point::new(5, 3)..Point::new(6, 4)), + ], cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(3, 1)..Point::new(3, 3))], + &buffer_2.read(cx).snapshot(), + vec![ExcerptRange::new(Point::new(3, 1)..Point::new(3, 3))], cx, ); assert_eq!( @@ -179,8 +188,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { " bbbb ccccc - ddd - eeee + fff + gggg jj" ), ); @@ -189,14 +198,14 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { .row_infos(MultiBufferRow(0)) .map(|info| info.buffer_row) .collect::>(), - [Some(1), Some(2), Some(3), Some(4), Some(3)] + [Some(1), Some(2), Some(5), Some(6), Some(3)] ); assert_eq!( snapshot .row_infos(MultiBufferRow(2)) .map(|info| info.buffer_row) .collect::>(), - [Some(3), Some(4), Some(3)] + [Some(5), Some(6), Some(3)] ); assert_eq!( snapshot @@ -217,7 +226,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot), &[ (MultiBufferRow(0), "bbbb\nccccc".to_string(), true), - (MultiBufferRow(2), "ddd\neeee".to_string(), false), + (MultiBufferRow(2), "fff\ngggg".to_string(), false), (MultiBufferRow(4), "jj".to_string(), true), ] ); @@ -235,15 +244,15 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { ); assert_eq!( boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), - &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)] ); assert_eq!( boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), - &[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)] ); assert_eq!( boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot), - 
&[(MultiBufferRow(2), "ddd\neeee".to_string(), false)] + &[(MultiBufferRow(2), "fff\ngggg".to_string(), false)] ); assert_eq!( boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot), @@ -273,8 +282,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { "bbbb\n", // Preserve newlines "c\n", // "cc\n", // - "ddd\n", // - "eeee\n", // + "fff\n", // + "gggg\n", // "jj" // ) ); @@ -310,9 +319,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { ); let snapshot = multibuffer.update(cx, |multibuffer, cx| { - let (buffer_2_excerpt_id, _) = - multibuffer.excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)[0].clone(); - multibuffer.remove_excerpts([buffer_2_excerpt_id], cx); + multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); multibuffer.snapshot(cx) }); @@ -322,8 +329,8 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { "bbbb\n", // Preserve newlines "c\n", // "cc\n", // - "ddd\n", // - "eeee", // + "fff\n", // + "gggg", // ) ); @@ -747,18 +754,29 @@ fn test_excerpt_events(cx: &mut App) { .detach(); }); + let buffer_1_snapshot = buffer_1.read(cx).snapshot(); + let buffer_2_snapshot = buffer_2.read(cx).snapshot(); leader_multibuffer.update(cx, |leader, cx| { - leader.push_excerpts( + leader.set_excerpt_ranges_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(0..8), ExcerptRange::new(12..16)], + &buffer_1_snapshot, + vec![ + ExcerptRange::new((0..8).to_point(&buffer_1_snapshot)), + ExcerptRange::new((22..26).to_point(&buffer_1_snapshot)), + ], cx, ); - leader.insert_excerpts_after( - leader.excerpt_ids()[0], + leader.set_excerpt_ranges_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(0..5), ExcerptRange::new(10..15)], + &buffer_2_snapshot, + vec![ + ExcerptRange::new((0..5).to_point(&buffer_2_snapshot)), + ExcerptRange::new((20..25).to_point(&buffer_2_snapshot)), + ], cx, - ) + ); }); assert_eq!( leader_multibuffer.read(cx).snapshot(cx).text(), @@ -767,34 +785,26 @@ fn 
test_excerpt_events(cx: &mut App) { assert_eq!(*follower_edit_event_count.read(), 2); leader_multibuffer.update(cx, |leader, cx| { - let excerpt_ids = leader.excerpt_ids(); - leader.remove_excerpts([excerpt_ids[1], excerpt_ids[3]], cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 3); - - // Removing an empty set of excerpts is a noop. - leader_multibuffer.update(cx, |leader, cx| { - leader.remove_excerpts([], cx); - }); - assert_eq!( - leader_multibuffer.read(cx).snapshot(cx).text(), - follower_multibuffer.read(cx).snapshot(cx).text(), - ); - assert_eq!(*follower_edit_event_count.read(), 3); - - // Adding an empty set of excerpts is a noop. - leader_multibuffer.update(cx, |leader, cx| { - leader.push_excerpts::(buffer_2.clone(), [], cx); + leader.set_excerpt_ranges_for_path( + PathKey::sorted(0), + buffer_1.clone(), + &buffer_1_snapshot, + vec![ExcerptRange::new((0..8).to_point(&buffer_1_snapshot))], + cx, + ); + leader.set_excerpt_ranges_for_path( + PathKey::sorted(1), + buffer_2, + &buffer_2_snapshot, + vec![ExcerptRange::new((0..5).to_point(&buffer_2_snapshot))], + cx, + ); }); assert_eq!( leader_multibuffer.read(cx).snapshot(cx).text(), follower_multibuffer.read(cx).snapshot(cx).text(), ); - assert_eq!(*follower_edit_event_count.read(), 3); + assert_eq!(*follower_edit_event_count.read(), 4); leader_multibuffer.update(cx, |leader, cx| { leader.clear(cx); @@ -803,7 +813,7 @@ fn test_excerpt_events(cx: &mut App) { leader_multibuffer.read(cx).snapshot(cx).text(), follower_multibuffer.read(cx).snapshot(cx).text(), ); - assert_eq!(*follower_edit_event_count.read(), 4); + assert_eq!(*follower_edit_event_count.read(), 5); } #[gpui::test] @@ -1013,7 +1023,13 @@ async fn test_empty_diff_excerpt(cx: &mut TestAppContext) { let diff = cx .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); 
multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts(buffer.clone(), [ExcerptRange::new(0..0)], cx); + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(0), + buffer.clone(), + &buffer.read(cx).snapshot(), + vec![ExcerptRange::new(Point::zero()..Point::zero())], + cx, + ); multibuffer.set_all_diff_hunks_expanded(cx); multibuffer.add_diff(diff.clone(), cx); }); @@ -1031,7 +1047,13 @@ async fn test_empty_diff_excerpt(cx: &mut TestAppContext) { let buf2 = cx.new(|cx| Buffer::local("X", cx)); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts(buf2, [ExcerptRange::new(0..1)], cx); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + buf2, + [Point::new(0, 0)..Point::new(0, 1)], + 0, + cx, + ); }); buffer.update(cx, |buffer, cx| { @@ -1105,8 +1127,20 @@ fn test_multibuffer_anchors(cx: &mut App) { let buffer_2 = cx.new(|cx| Buffer::local("efghi", cx)); let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts(buffer_1.clone(), [ExcerptRange::new(0..4)], cx); - multibuffer.push_excerpts(buffer_2.clone(), [ExcerptRange::new(0..5)], cx); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::new(0, 0)..Point::new(0, 4)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + buffer_2.clone(), + [Point::new(0, 0)..Point::new(0, 5)], + 0, + cx, + ); multibuffer }); let old_snapshot = multibuffer.read(cx).snapshot(cx); @@ -1219,29 +1253,39 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { // Add an excerpt from buffer 1 that spans this new insertion. 
buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .push_excerpts(buffer_1.clone(), [ExcerptRange::new(0..7)], cx) - .pop() - .unwrap() + let buffer_1_snapshot = buffer_1.read(cx).snapshot(); + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(0), + buffer_1, + &buffer_1_snapshot, + vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))], + cx, + ); + multibuffer.excerpt_ids().into_iter().next().unwrap() }); let snapshot_1 = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot_1.text(), "abcd123"); // Replace the buffer 1 excerpt with new excerpts from buffer 2. - let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt_id_1], cx); + let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx); + let snapshot_2 = buffer_2.read(cx).snapshot(); + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(1), + buffer_2.clone(), + &buffer_2.read(cx).snapshot(), + vec![ + ExcerptRange::new((0..4).to_point(&snapshot_2)), + ExcerptRange::new((6..10).to_point(&snapshot_2)), + ExcerptRange::new((12..16).to_point(&snapshot_2)), + ], + cx, + ); let mut ids = multibuffer - .push_excerpts( - buffer_2.clone(), - [ - ExcerptRange::new(0..4), - ExcerptRange::new(6..10), - ExcerptRange::new(12..16), - ], - cx, - ) - .into_iter(); + .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx) + .into_iter() + .map(|(id, _)| id); (ids.next().unwrap(), ids.next().unwrap()) }); let snapshot_2 = multibuffer.read(cx).snapshot(cx); @@ -1283,22 +1327,33 @@ fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { // Replace the middle excerpt with a smaller excerpt in buffer 2, // that intersects the old excerpt. 
- let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts([excerpt_id_3], cx); - multibuffer - .insert_excerpts_after( - excerpt_id_2, - buffer_2.clone(), - [ExcerptRange::new(5..8)], - cx, - ) - .pop() - .unwrap() + multibuffer.update(cx, |multibuffer, cx| { + let snapshot_2 = buffer_2.read(cx).snapshot(); + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(1), + buffer_2.clone(), + &buffer_2.read(cx).snapshot(), + vec![ + ExcerptRange::new((0..4).to_point(&snapshot_2)), + ExcerptRange::new((12..16).to_point(&snapshot_2)), + ], + cx, + ); + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(1), + buffer_2.clone(), + &buffer_2.read(cx).snapshot(), + vec![ + ExcerptRange::new((0..4).to_point(&snapshot_2)), + ExcerptRange::new((5..8).to_point(&snapshot_2)), + ExcerptRange::new((12..16).to_point(&snapshot_2)), + ], + cx, + ); }); let snapshot_3 = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); - assert_ne!(excerpt_id_5, excerpt_id_3); // Resolve some anchors from the previous snapshot in the new snapshot. 
// The third anchor can't be resolved, since its excerpt has been removed, @@ -2149,14 +2204,18 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..buffer_1.read(cx).max_point()], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..buffer_2.read(cx).max_point()], + 0, cx, ); multibuffer.add_diff(diff_1.clone(), cx); @@ -3431,14 +3490,18 @@ fn test_history(cx: &mut App) { this.set_group_interval(group_interval); }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(0..buffer_1.read(cx).len())], + [Point::zero()..buffer_1.read(cx).max_point()], + 0, cx, ); - multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(0..buffer_2.read(cx).len())], + [Point::zero()..buffer_2.read(cx).max_point()], + 0, cx, ); }); @@ -3691,18 +3754,23 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.set_all_diff_hunks_expanded(cx); - ids.extend(multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..buffer_1.read(cx).max_point()], + 0, cx, - )); - ids.extend(multibuffer.push_excerpts( + ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - 
[ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)], + [Point::zero()..buffer_2.read(cx).max_point()], + 0, cx, - )); + ); multibuffer.add_diff(diff_1.clone(), cx); multibuffer.add_diff(diff_2.clone(), cx); + ids = multibuffer.excerpt_ids(); multibuffer }); @@ -3747,7 +3815,14 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { cx.run_until_parked(); let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::singleton(buffer_1.clone(), cx); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::zero()..buffer_1.read(cx).max_point()], + 0, + cx, + ); multibuffer.add_diff(diff_1.clone(), cx); multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); multibuffer @@ -3790,9 +3865,11 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let text_2 = "foo\n".to_owned(); let buffer_2 = cx.new(|cx| Buffer::local(&text_2, cx)); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpts( + multibuffer.set_excerpt_ranges_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + &buffer_2.read(cx).snapshot(), + vec![ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], cx, ); }); @@ -4893,25 +4970,36 @@ fn test_excerpts_containment_functions(cx: &mut App) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let (excerpt_1_id, excerpt_2_id, excerpt_3_id) = multibuffer.update(cx, |multibuffer, cx| { - let excerpt_1_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))], + [Point::new(0, 0)..Point::new(1, 3)], + 0, cx, - )[0]; + ); - let excerpt_2_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 
3))], + [Point::new(0, 0)..Point::new(1, 3)], + 0, cx, - )[0]; + ); - let excerpt_3_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), buffer_3.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 3))], + [Point::new(0, 0)..Point::new(0, 3)], + 0, cx, - )[0]; + ); - (excerpt_1_id, excerpt_2_id, excerpt_3_id) + let mut ids = multibuffer.excerpt_ids().into_iter(); + ( + ids.next().unwrap(), + ids.next().unwrap(), + ids.next().unwrap(), + ) }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -4996,19 +5084,25 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| { - let excerpt_1_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))], + [Point::new(0, 0)..Point::new(1, 3)], + 0, cx, - )[0]; + ); - let excerpt_2_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_2.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 3))], + [Point::new(0, 0)..Point::new(0, 3)], + 0, cx, - )[0]; + ); - (excerpt_1_id, excerpt_2_id) + let excerpt_ids = multibuffer.excerpt_ids(); + + (excerpt_ids[0], excerpt_ids[1]) }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5058,19 +5152,24 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { let multibuffer_trailing_empty = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); let (te_excerpt_1_id, te_excerpt_2_id) = multibuffer_trailing_empty.update(cx, |multibuffer, cx| { - let excerpt_1_id = multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), buffer_1.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))], + [Point::new(0, 0)..Point::new(1, 3)], + 0, cx, - )[0]; + ); - let excerpt_2_id = 
multibuffer.push_excerpts( + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), buffer_empty.clone(), - [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 0))], + [Point::new(0, 0)..Point::new(0, 0)], + 0, cx, - )[0]; + ); - (excerpt_1_id, excerpt_2_id) + let excerpt_ids = multibuffer.excerpt_ids(); + (excerpt_ids[0], excerpt_ids[1]) }); let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx); diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 475ed3c9d623382c5ed989918ee3224afc95cd25..09d17d7b7fe2e9e666ba6c5777216c9c8ba4dea0 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -5,7 +5,7 @@ use gpui::{App, AppContext, Context, Entity}; use itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; -use text::{Bias, BufferId, OffsetRangeExt, locator::Locator}; +use text::{Bias, OffsetRangeExt, locator::Locator}; use util::{post_inc, rel_path::RelPath}; use ztracing::instrument; @@ -27,6 +27,12 @@ pub struct PathKey { } impl PathKey { + pub fn sorted(sort_prefix: u64) -> Self { + Self { + sort_prefix: Some(sort_prefix), + path: RelPath::empty().into_arc(), + } + } pub fn with_sort_prefix(sort_prefix: u64, path: Arc) -> Self { Self { sort_prefix: Some(sort_prefix), @@ -86,6 +92,17 @@ impl MultiBuffer { Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start)) } + pub fn set_excerpts_for_buffer( + &mut self, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> (Vec>, bool) { + let path = PathKey::for_buffer(&buffer, cx); + self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx) + } + /// Sets excerpts, returns `true` if at least one new excerpt was added. 
#[instrument(skip_all)] pub fn set_excerpts_for_path( @@ -172,15 +189,6 @@ impl MultiBuffer { } } - pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context) { - self.remove_excerpts( - self.excerpts_for_buffer(buffer, cx) - .into_iter() - .map(|(excerpt, _)| excerpt), - cx, - ); - } - pub(super) fn expand_excerpts_with_paths( &mut self, ids: impl IntoIterator, @@ -382,9 +390,7 @@ impl MultiBuffer { { last.context.end = last.context.end.max(existing_range.end); to_remove.push(*existing_id); - self.snapshot - .get_mut() - .replaced_excerpts + Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) .insert(*existing_id, *last_id); existing_iter.next(); continue; @@ -462,9 +468,7 @@ impl MultiBuffer { (Some(_), Some((_, existing_range))) => { let existing_id = existing_iter.next().unwrap(); let new_id = next_excerpt_id(); - self.snapshot - .get_mut() - .replaced_excerpts + Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) .insert(existing_id, new_id); to_remove.push(existing_id); let mut range = new_iter.next().unwrap(); diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 158ec689788a21216f16ffd14e34771d68f544e9..e6145e409058a3fe453c4557b2a32cccf6baf16c 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -88,6 +88,8 @@ pub enum Model { FivePointTwo, #[serde(rename = "gpt-5.2-codex")] FivePointTwoCodex, + #[serde(rename = "gpt-5.3-codex")] + FivePointThreeCodex, #[serde(rename = "custom")] Custom { name: String, @@ -128,6 +130,7 @@ impl Model { "gpt-5.1" => Ok(Self::FivePointOne), "gpt-5.2" => Ok(Self::FivePointTwo), "gpt-5.2-codex" => Ok(Self::FivePointTwoCodex), + "gpt-5.3-codex" => Ok(Self::FivePointThreeCodex), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -149,6 +152,7 @@ impl Model { Self::FivePointOne => "gpt-5.1", Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", + Self::FivePointThreeCodex => "gpt-5.3-codex", 
Self::Custom { name, .. } => name, } } @@ -170,6 +174,7 @@ impl Model { Self::FivePointOne => "gpt-5.1", Self::FivePointTwo => "gpt-5.2", Self::FivePointTwoCodex => "gpt-5.2-codex", + Self::FivePointThreeCodex => "gpt-5.3-codex", Self::Custom { display_name, .. } => display_name.as_deref().unwrap_or(&self.id()), } } @@ -191,6 +196,7 @@ impl Model { Self::FivePointOne => 400_000, Self::FivePointTwo => 400_000, Self::FivePointTwoCodex => 400_000, + Self::FivePointThreeCodex => 400_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -215,6 +221,7 @@ impl Model { Self::FivePointOne => Some(128_000), Self::FivePointTwo => Some(128_000), Self::FivePointTwoCodex => Some(128_000), + Self::FivePointThreeCodex => Some(128_000), } } @@ -223,6 +230,7 @@ impl Model { Self::Custom { reasoning_effort, .. } => reasoning_effort.to_owned(), + Self::FivePointThreeCodex => Some(ReasoningEffort::Medium), _ => None, } } @@ -233,7 +241,7 @@ impl Model { supports_chat_completions, .. } => *supports_chat_completions, - Self::FiveCodex | Self::FivePointTwoCodex => false, + Self::FiveCodex | Self::FivePointTwoCodex | Self::FivePointThreeCodex => false, _ => true, } } @@ -254,6 +262,7 @@ impl Model { | Self::FivePointOne | Self::FivePointTwo | Self::FivePointTwoCodex + | Self::FivePointThreeCodex | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Model::Custom { .. } => false, } diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 656188e249fc864e1328c8f458bdc46aa7eaea3a..40e10fb3badaf2e00c6dbcc75af06e7b758faa81 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -419,12 +419,6 @@ pub fn copilot_dir() -> &'static PathBuf { COPILOT_DIR.get_or_init(|| data_dir().join("copilot")) } -/// Returns the path to the Supermaven directory. 
-pub fn supermaven_dir() -> &'static PathBuf { - static SUPERMAVEN_DIR: OnceLock = OnceLock::new(); - SUPERMAVEN_DIR.get_or_init(|| data_dir().join("supermaven")) -} - /// Returns the path to the default Prettier directory. pub fn default_prettier_dir() -> &'static PathBuf { static DEFAULT_PRETTIER_DIR: OnceLock = OnceLock::new(); diff --git a/crates/picker/Cargo.toml b/crates/picker/Cargo.toml index f85c55b9f27bcb8fd87101c341058e1a3962934e..8c76aa746453866755be322df576a519ba147b24 100644 --- a/crates/picker/Cargo.toml +++ b/crates/picker/Cargo.toml @@ -28,8 +28,6 @@ workspace.workspace = true zed_actions.workspace = true [dev-dependencies] -ctor.workspace = true editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true gpui = { workspace = true, features = ["test-support"] } -serde_json.workspace = true +settings.workspace = true diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 716653d89642fe6d8f457f145ed15b8972432a09..e87ec3415cf6d70d840d8566accb94ac6de1547c 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -114,7 +114,7 @@ pub trait PickerDelegate: Sized + 'static { None } fn can_select( - &mut self, + &self, _ix: usize, _window: &mut Window, _cx: &mut Context>, @@ -619,6 +619,9 @@ impl Picker { ) { cx.stop_propagation(); window.prevent_default(); + if !self.delegate.can_select(ix, window, cx) { + return; + } self.set_selected_index(ix, None, false, window, cx); self.do_confirm(secondary, window, cx) } @@ -753,10 +756,11 @@ impl Picker { ix: usize, ) -> impl IntoElement + use { let item_bounds = self.item_bounds.clone(); + let selectable = self.delegate.can_select(ix, window, cx); div() .id(("item", ix)) - .cursor_pointer() + .when(selectable, |this| this.cursor_pointer()) .child( canvas( move |bounds, _window, _cx| { @@ -850,6 +854,175 @@ impl Picker { } } +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use std::cell::Cell; + + struct TestDelegate { + 
items: Vec, + selected_index: usize, + confirmed_index: Rc>>, + } + + impl TestDelegate { + fn new(items: Vec) -> Self { + Self { + items, + selected_index: 0, + confirmed_index: Rc::new(Cell::new(None)), + } + } + } + + impl PickerDelegate for TestDelegate { + type ListItem = ui::ListItem; + + fn match_count(&self) -> usize { + self.items.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.selected_index = ix; + } + + fn can_select( + &self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) -> bool { + self.items.get(ix).copied().unwrap_or(false) + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Test".into() + } + + fn update_matches( + &mut self, + _query: String, + _window: &mut Window, + _cx: &mut Context>, + ) -> Task<()> { + Task::ready(()) + } + + fn confirm( + &mut self, + _secondary: bool, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.confirmed_index.set(Some(self.selected_index)); + } + + fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + + fn render_match( + &self, + ix: usize, + selected: bool, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + Some( + ui::ListItem::new(ix) + .inset(true) + .toggle_state(selected) + .child(ui::Label::new(format!("Item {ix}"))), + ) + } + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let store = settings::SettingsStore::test(cx); + cx.set_global(store); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + }); + } + + #[gpui::test] + async fn test_clicking_non_selectable_item_does_not_confirm(cx: &mut TestAppContext) { + init_test(cx); + + let confirmed_index = Rc::new(Cell::new(None)); + let (picker, cx) = cx.add_window_view(|window, cx| { + let mut delegate = TestDelegate::new(vec![true, false, true]); + delegate.confirmed_index = confirmed_index.clone(); + 
Picker::uniform_list(delegate, window, cx) + }); + + picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(1, false, window, cx); + }); + assert!( + confirmed_index.get().is_none(), + "clicking a non-selectable item should not confirm" + ); + + picker.update_in(cx, |picker, window, cx| { + picker.handle_click(0, false, window, cx); + }); + assert_eq!( + confirmed_index.get(), + Some(0), + "clicking a selectable item should confirm" + ); + } + + #[gpui::test] + async fn test_keyboard_navigation_skips_non_selectable_items(cx: &mut TestAppContext) { + init_test(cx); + + let (picker, cx) = cx.add_window_view(|window, cx| { + Picker::uniform_list(TestDelegate::new(vec![true, false, true]), window, cx) + }); + + picker.update(cx, |picker, _cx| { + assert_eq!(picker.delegate.selected_index(), 0); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_next(&menu::SelectNext, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 2, + "select_next should skip non-selectable item at index 1" + ); + }); + + picker.update_in(cx, |picker, window, cx| { + picker.select_previous(&menu::SelectPrevious, window, cx); + }); + picker.update(cx, |picker, _cx| { + assert_eq!( + picker.delegate.selected_index(), + 0, + "select_previous should skip non-selectable item at index 1" + ); + }); + } +} + impl EventEmitter for Picker {} impl ModalView for Picker {} diff --git a/crates/platform_title_bar/Cargo.toml b/crates/platform_title_bar/Cargo.toml index 2f1f6d2cd9297136077780aafdc75d22ecf6b845..43ad6166929bc463edbea878941ba19ffe2ea3a9 100644 --- a/crates/platform_title_bar/Cargo.toml +++ b/crates/platform_title_bar/Cargo.toml @@ -15,6 +15,7 @@ doctest = false [dependencies] feature_flags.workspace = true gpui.workspace = true +project.workspace = true settings.workspace = true smallvec.workspace = true theme.workspace 
= true diff --git a/crates/platform_title_bar/src/platform_title_bar.rs b/crates/platform_title_bar/src/platform_title_bar.rs index 6f89a5c39137896ee4b1a6cd3b81770fc3382284..7053fe89e7fdc6ece9ad50fdd8facaf31dba3086 100644 --- a/crates/platform_title_bar/src/platform_title_bar.rs +++ b/crates/platform_title_bar/src/platform_title_bar.rs @@ -7,6 +7,8 @@ use gpui::{ MouseButton, ParentElement, StatefulInteractiveElement, Styled, Window, WindowControlArea, div, px, }; +use project::DisableAiSettings; +use settings::Settings; use smallvec::SmallVec; use std::mem; use ui::{ @@ -95,7 +97,7 @@ impl PlatformTitleBar { } pub fn is_multi_workspace_enabled(cx: &App) -> bool { - cx.has_flag::() + cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai } } diff --git a/crates/project/src/agent_registry_store.rs b/crates/project/src/agent_registry_store.rs index 5b047a815096d778b4d120132f0e024eaf128942..155badc4ac7da22921b121428cc34a0d46f5b982 100644 --- a/crates/project/src/agent_registry_store.rs +++ b/crates/project/src/agent_registry_store.rs @@ -9,9 +9,9 @@ use futures::AsyncReadExt; use gpui::{App, AppContext as _, Context, Entity, Global, SharedString, Task}; use http_client::{AsyncBody, HttpClient}; use serde::Deserialize; -use settings::Settings; +use settings::Settings as _; -use crate::agent_server_store::AllAgentServersSettings; +use crate::DisableAiSettings; const REGISTRY_URL: &str = "https://cdn.agentclientprotocol.com/registry/v1/latest/registry.json"; const REFRESH_THROTTLE_DURATION: Duration = Duration::from_secs(60 * 60); @@ -129,13 +129,11 @@ impl AgentRegistryStore { let store = cx.new(|cx| Self::new(fs, http_client, cx)); cx.set_global(GlobalAgentRegistryStore(store.clone())); - if AllAgentServersSettings::get_global(cx).has_registry_agents() { - store.update(cx, |store, cx| { - if store.agents.is_empty() { - store.refresh(cx); - } - }); - } + store.update(cx, |store, cx| { + if store.agents.is_empty() { + store.refresh(cx); + } + }); store } @@ 
-173,6 +171,10 @@ impl AgentRegistryStore { return; } + if DisableAiSettings::get_global(cx).disable_ai { + return; + } + self.is_fetching = true; self.fetch_error = None; self.last_refresh = Some(Instant::now()); @@ -249,6 +251,10 @@ impl AgentRegistryStore { http_client: Arc, cx: &mut Context, ) { + if DisableAiSettings::get_global(cx).disable_ai { + return; + } + cx.spawn(async move |this, cx| -> Result<()> { let cache_path = registry_cache_path(); if !fs.is_file(&cache_path).await { diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 3dcbab8bafb470602c99c88049eb5266886ed1cd..b1dbefa15a3dcaf64c36d027d68060d18f533def 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1,20 +1,15 @@ use remote::Interactive; use std::{ any::Any, - borrow::Borrow, path::{Path, PathBuf}, - str::FromStr as _, sync::Arc, time::Duration, }; use anyhow::{Context as _, Result, bail}; use collections::HashMap; -use fs::{Fs, RemoveOptions, RenameOptions}; -use futures::StreamExt as _; -use gpui::{ - AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task, -}; +use fs::Fs; +use gpui::{AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task}; use http_client::{HttpClient, github::AssetKind}; use node_runtime::NodeRuntime; use remote::RemoteClient; @@ -23,10 +18,10 @@ use rpc::{ proto::{self, ExternalExtensionAgent}, }; use schemars::JsonSchema; -use semver::Version; use serde::{Deserialize, Serialize}; use settings::{RegisterSetting, SettingsStore}; -use task::{Shell, SpawnInTerminal}; +use sha2::{Digest, Sha256}; +use task::Shell; use util::{ResultExt as _, debug_panic}; use crate::ProjectEnvironment; @@ -66,7 +61,7 @@ impl std::fmt::Debug for AgentServerCommand { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct ExternalAgentServerName(pub SharedString); impl 
std::fmt::Display for ExternalAgentServerName { @@ -87,7 +82,7 @@ impl From for SharedString { } } -impl Borrow for ExternalAgentServerName { +impl std::borrow::Borrow for ExternalAgentServerName { fn borrow(&self) -> &str { &self.0 } @@ -95,7 +90,6 @@ impl Borrow for ExternalAgentServerName { #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum ExternalAgentSource { - Builtin, #[default] Custom, Extension, @@ -105,12 +99,11 @@ pub enum ExternalAgentSource { pub trait ExternalAgentServer { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>>; + ) -> Task>; fn as_any_mut(&mut self) -> &mut dyn Any; } @@ -410,86 +403,13 @@ impl AgentServerStore { // If we don't have agents from the registry loaded yet, trigger a // refresh, which will cause this function to be called again + let registry_store = AgentRegistryStore::try_global(cx); if new_settings.has_registry_agents() - && let Some(registry) = AgentRegistryStore::try_global(cx) + && let Some(registry) = registry_store.as_ref() { registry.update(cx, |registry, cx| registry.refresh_if_stale(cx)); } - self.external_agents.clear(); - self.external_agents.insert( - GEMINI_NAME.into(), - ExternalAgentEntry::new( - Box::new(LocalGemini { - fs: fs.clone(), - node_runtime: node_runtime.clone(), - project_environment: project_environment.clone(), - custom_command: new_settings - .gemini - .clone() - .and_then(|settings| settings.custom_command()), - settings_env: new_settings - .gemini - .as_ref() - .and_then(|settings| settings.env.clone()), - ignore_system_version: new_settings - .gemini - .as_ref() - .and_then(|settings| settings.ignore_system_version) - .unwrap_or(true), - }), - ExternalAgentSource::Builtin, - None, - None, - ), - ); - self.external_agents.insert( - CODEX_NAME.into(), - ExternalAgentEntry::new( - Box::new(LocalCodex { - fs: fs.clone(), - project_environment: project_environment.clone(), - 
custom_command: new_settings - .codex - .clone() - .and_then(|settings| settings.custom_command()), - settings_env: new_settings - .codex - .as_ref() - .and_then(|settings| settings.env.clone()), - http_client: http_client.clone(), - no_browser: downstream_client - .as_ref() - .is_some_and(|(_, client)| !client.has_wsl_interop()), - }), - ExternalAgentSource::Builtin, - None, - None, - ), - ); - self.external_agents.insert( - CLAUDE_AGENT_NAME.into(), - ExternalAgentEntry::new( - Box::new(LocalClaudeCode { - fs: fs.clone(), - node_runtime: node_runtime.clone(), - project_environment: project_environment.clone(), - custom_command: new_settings - .claude - .clone() - .and_then(|settings| settings.custom_command()), - settings_env: new_settings - .claude - .as_ref() - .and_then(|settings| settings.env.clone()), - }), - ExternalAgentSource::Builtin, - None, - None, - ), - ); - - let registry_store = AgentRegistryStore::try_global(cx); let registry_agents_by_id = registry_store .as_ref() .map(|store| { @@ -503,13 +423,14 @@ impl AgentServerStore { }) .unwrap_or_default(); + self.external_agents.clear(); + // Insert extension agents before custom/registry so registry entries override extensions. for (agent_name, ext_id, targets, env, icon_path, display_name) in extension_agents.iter() { let name = ExternalAgentServerName(agent_name.clone().into()); let mut env = env.clone(); if let Some(settings_env) = new_settings - .custom .get(agent_name.as_ref()) .and_then(|settings| match settings { CustomAgentServerSettings::Extension { env, .. } => Some(env.clone()), @@ -542,7 +463,7 @@ impl AgentServerStore { ); } - for (name, settings) in &new_settings.custom { + for (name, settings) in new_settings.iter() { match settings { CustomAgentServerSettings::Custom { command, .. 
} => { let agent_name = ExternalAgentServerName(name.clone().into()); @@ -672,7 +593,7 @@ impl AgentServerStore { extension_agents: vec![], _subscriptions: subscriptions, }, - external_agents: Default::default(), + external_agents: HashMap::default(), }; if let Some(_events) = extension::ExtensionEvents::try_global(cx) {} this.agent_servers_settings_changed(cx); @@ -680,70 +601,19 @@ impl AgentServerStore { } pub(crate) fn remote(project_id: u64, upstream_client: Entity) -> Self { - // Set up the builtin agents here so they're immediately available in - // remote projects--we know that the HeadlessProject on the other end - // will have them. - let external_agents: [(ExternalAgentServerName, ExternalAgentEntry); 3] = [ - ( - CLAUDE_AGENT_NAME.into(), - ExternalAgentEntry::new( - Box::new(RemoteExternalAgentServer { - project_id, - upstream_client: upstream_client.clone(), - name: CLAUDE_AGENT_NAME.into(), - status_tx: None, - new_version_available_tx: None, - }) as Box, - ExternalAgentSource::Builtin, - None, - None, - ), - ), - ( - CODEX_NAME.into(), - ExternalAgentEntry::new( - Box::new(RemoteExternalAgentServer { - project_id, - upstream_client: upstream_client.clone(), - name: CODEX_NAME.into(), - status_tx: None, - new_version_available_tx: None, - }) as Box, - ExternalAgentSource::Builtin, - None, - None, - ), - ), - ( - GEMINI_NAME.into(), - ExternalAgentEntry::new( - Box::new(RemoteExternalAgentServer { - project_id, - upstream_client: upstream_client.clone(), - name: GEMINI_NAME.into(), - status_tx: None, - new_version_available_tx: None, - }) as Box, - ExternalAgentSource::Builtin, - None, - None, - ), - ), - ]; - Self { state: AgentServerStoreState::Remote { project_id, upstream_client, }, - external_agents: external_agents.into_iter().collect(), + external_agents: HashMap::default(), } } pub fn collab() -> Self { Self { state: AgentServerStoreState::Collab, - external_agents: Default::default(), + external_agents: HashMap::default(), } } @@ -790,6 
+660,17 @@ impl AgentServerStore { .map(|entry| entry.server.as_mut()) } + pub fn no_browser(&self) -> bool { + match &self.state { + AgentServerStoreState::Local { + downstream_client, .. + } => downstream_client + .as_ref() + .is_some_and(|(_, client)| !client.has_wsl_interop()), + _ => false, + } + } + pub fn external_agents(&self) -> impl Iterator { self.external_agents.keys() } @@ -799,7 +680,7 @@ impl AgentServerStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let (command, root_dir, login_command) = this + let command = this .update(&mut cx, |this, cx| { let AgentServerStoreState::Local { downstream_client, .. @@ -808,6 +689,7 @@ impl AgentServerStore { debug_panic!("should not receive GetAgentServerCommand in a non-local project"); bail!("unexpected GetAgentServerCommand request in a non-local project"); }; + let no_browser = this.no_browser(); let agent = this .external_agents .get_mut(&*envelope.payload.name) @@ -857,9 +739,12 @@ impl AgentServerStore { (status_tx, new_version_available_tx) }) .unzip(); + let mut extra_env = HashMap::default(); + if no_browser { + extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned()); + } anyhow::Ok(agent.get_command( - envelope.payload.root_dir.as_deref(), - HashMap::default(), + extra_env, status_tx, new_version_available_tx, &mut cx.to_async(), @@ -873,8 +758,9 @@ impl AgentServerStore { .env .map(|env| env.into_iter().collect()) .unwrap_or_default(), - root_dir: root_dir, - login: login_command.map(|cmd| cmd.to_proto()), + // root_dir and login are no longer used, but returned for backwards compatibility + root_dir: paths::home_dir().to_string_lossy().to_string(), + login: None, }) } @@ -915,13 +801,7 @@ impl AgentServerStore { .names .into_iter() .map(|name| { - let agent_name = ExternalAgentServerName(name.clone().into()); - let fallback_source = - if name == GEMINI_NAME || name == CLAUDE_AGENT_NAME || name == CODEX_NAME { - ExternalAgentSource::Builtin - } else { - ExternalAgentSource::Custom 
- }; + let agent_name = ExternalAgentServerName(name.into()); let (icon, display_name, source) = metadata .remove(&agent_name) .or_else(|| { @@ -935,12 +815,7 @@ impl AgentServerStore { ) }) }) - .unwrap_or((None, None, fallback_source)); - let source = if fallback_source == ExternalAgentSource::Builtin { - ExternalAgentSource::Builtin - } else { - source - }; + .unwrap_or((None, None, ExternalAgentSource::default())); let agent = RemoteExternalAgentServer { project_id: *project_id, upstream_client: upstream_client.clone(), @@ -1057,192 +932,6 @@ impl AgentServerStore { } } -fn get_or_npm_install_builtin_agent( - binary_name: SharedString, - package_name: SharedString, - entrypoint_path: PathBuf, - minimum_version: Option, - status_tx: Option>, - new_version_available: Option>>, - fs: Arc, - node_runtime: NodeRuntime, - cx: &mut AsyncApp, -) -> Task> { - cx.spawn(async move |cx| { - let node_path = node_runtime.binary_path().await?; - let dir = paths::external_agents_dir().join(binary_name.as_str()); - fs.create_dir(&dir).await?; - - let mut stream = fs.read_dir(&dir).await?; - let mut versions = Vec::new(); - let mut to_delete = Vec::new(); - while let Some(entry) = stream.next().await { - let Ok(entry) = entry else { continue }; - let Some(file_name) = entry.file_name() else { - continue; - }; - - if let Some(name) = file_name.to_str() - && let Some(version) = semver::Version::from_str(name).ok() - && fs - .is_file(&dir.join(file_name).join(&entrypoint_path)) - .await - { - versions.push((version, file_name.to_owned())); - } else { - to_delete.push(file_name.to_owned()) - } - } - - versions.sort(); - let newest_version = if let Some((version, _)) = versions.last().cloned() - && minimum_version.is_none_or(|minimum_version| version >= minimum_version) - { - versions.pop() - } else { - None - }; - log::debug!("existing version of {package_name}: {newest_version:?}"); - to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name)); - - cx.background_spawn({ 
- let fs = fs.clone(); - let dir = dir.clone(); - async move { - for file_name in to_delete { - fs.remove_dir( - &dir.join(file_name), - RemoveOptions { - recursive: true, - ignore_if_not_exists: false, - }, - ) - .await - .ok(); - } - } - }) - .detach(); - - let version = if let Some((version, file_name)) = newest_version { - cx.background_spawn({ - let dir = dir.clone(); - let fs = fs.clone(); - async move { - let latest_version = node_runtime - .npm_package_latest_version(&package_name) - .await - .ok(); - if let Some(latest_version) = latest_version - && latest_version != version - { - let download_result = download_latest_version( - fs, - dir.clone(), - node_runtime, - package_name.clone(), - ) - .await - .log_err(); - if let Some(mut new_version_available) = new_version_available - && download_result.is_some() - { - new_version_available - .send(Some(latest_version.to_string())) - .ok(); - } - } - } - }) - .detach(); - file_name - } else { - if let Some(mut status_tx) = status_tx { - status_tx.send("Installing…".into()).ok(); - } - let dir = dir.clone(); - cx.background_spawn(download_latest_version( - fs.clone(), - dir.clone(), - node_runtime, - package_name.clone(), - )) - .await? 
- .to_string() - .into() - }; - - let agent_server_path = dir.join(version).join(entrypoint_path); - let agent_server_path_exists = fs.is_file(&agent_server_path).await; - anyhow::ensure!( - agent_server_path_exists, - "Missing entrypoint path {} after installation", - agent_server_path.to_string_lossy() - ); - - anyhow::Ok(AgentServerCommand { - path: node_path, - args: vec![agent_server_path.to_string_lossy().into_owned()], - env: None, - }) - }) -} - -fn find_bin_in_path( - bin_name: SharedString, - root_dir: PathBuf, - env: HashMap, - cx: &mut AsyncApp, -) -> Task> { - cx.background_executor().spawn(async move { - let which_result = if cfg!(windows) { - which::which(bin_name.as_str()) - } else { - let shell_path = env.get("PATH").cloned(); - which::which_in(bin_name.as_str(), shell_path.as_ref(), &root_dir) - }; - - if let Err(which::Error::CannotFindBinaryPath) = which_result { - return None; - } - - which_result.log_err() - }) -} - -async fn download_latest_version( - fs: Arc, - dir: PathBuf, - node_runtime: NodeRuntime, - package_name: SharedString, -) -> Result { - log::debug!("downloading latest version of {package_name}"); - - let tmp_dir = tempfile::tempdir_in(&dir)?; - - node_runtime - .npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")]) - .await?; - - let version = node_runtime - .npm_package_installed_version(tmp_dir.path(), &package_name) - .await? 
- .context("expected package to be installed")?; - - fs.rename( - &tmp_dir.keep(), - &dir.join(version.to_string()), - RenameOptions { - ignore_if_exists: true, - overwrite: true, - create_parents: false, - }, - ) - .await?; - - anyhow::Ok(version) -} - struct RemoteExternalAgentServer { project_id: u64, upstream_client: Entity, @@ -1254,16 +943,14 @@ struct RemoteExternalAgentServer { impl ExternalAgentServer for RemoteExternalAgentServer { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task> { let project_id = self.project_id; let name = self.name.to_string(); let upstream_client = self.upstream_client.downgrade(); - let root_dir = root_dir.map(|root_dir| root_dir.to_owned()); self.status_tx = status_tx; self.new_version_available_tx = new_version_available_tx; cx.spawn(async move |cx| { @@ -1274,7 +961,7 @@ impl ExternalAgentServer for RemoteExternalAgentServer { .request(proto::GetAgentServerCommand { project_id, name, - root_dir: root_dir.clone(), + root_dir: None, }) })? 
.await?; @@ -1290,381 +977,11 @@ impl ExternalAgentServer for RemoteExternalAgentServer { Interactive::No, ) })??; - Ok(( - AgentServerCommand { - path: command.program.into(), - args: command.args, - env: Some(command.env), - }, - root_dir, - response.login.map(SpawnInTerminal::from_proto), - )) - }) - } - - fn as_any_mut(&mut self) -> &mut dyn Any { - self - } -} - -struct LocalGemini { - fs: Arc, - node_runtime: NodeRuntime, - project_environment: Entity, - custom_command: Option, - settings_env: Option>, - ignore_system_version: bool, -} - -impl ExternalAgentServer for LocalGemini { - fn get_command( - &mut self, - root_dir: Option<&str>, - extra_env: HashMap, - status_tx: Option>, - new_version_available_tx: Option>>, - cx: &mut AsyncApp, - ) -> Task)>> { - let fs = self.fs.clone(); - let node_runtime = self.node_runtime.clone(); - let project_environment = self.project_environment.downgrade(); - let custom_command = self.custom_command.clone(); - let settings_env = self.settings_env.clone(); - let ignore_system_version = self.ignore_system_version; - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - - cx.spawn(async move |cx| { - let mut env = project_environment - .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - root_dir.clone(), - cx, - ) - })? 
- .await - .unwrap_or_default(); - - env.extend(settings_env.unwrap_or_default()); - - let mut command = if let Some(mut custom_command) = custom_command { - custom_command.env = Some(env); - custom_command - } else if !ignore_system_version - && let Some(bin) = - find_bin_in_path("gemini".into(), root_dir.to_path_buf(), env.clone(), cx).await - { - AgentServerCommand { - path: bin, - args: Vec::new(), - env: Some(env), - } - } else { - let mut command = get_or_npm_install_builtin_agent( - GEMINI_NAME.into(), - "@google/gemini-cli".into(), - "node_modules/@google/gemini-cli/dist/index.js".into(), - if cfg!(windows) { - // v0.8.x on Windows has a bug that causes the initialize request to hang forever - Some("0.9.0".parse().unwrap()) - } else { - Some("0.2.1".parse().unwrap()) - }, - status_tx, - new_version_available_tx, - fs, - node_runtime, - cx, - ) - .await?; - command.env = Some(env); - command - }; - - // Gemini CLI doesn't seem to have a dedicated invocation for logging in--we just run it normally without any arguments. 
- let login = task::SpawnInTerminal { - command: Some(command.path.to_string_lossy().into_owned()), - args: command.args.clone(), - env: command.env.clone().unwrap_or_default(), - label: "gemini /auth".into(), - ..Default::default() - }; - - command.env.get_or_insert_default().extend(extra_env); - command.args.push("--experimental-acp".into()); - Ok(( - command, - root_dir.to_string_lossy().into_owned(), - Some(login), - )) - }) - } - - fn as_any_mut(&mut self) -> &mut dyn Any { - self - } -} - -struct LocalClaudeCode { - fs: Arc, - node_runtime: NodeRuntime, - project_environment: Entity, - custom_command: Option, - settings_env: Option>, -} - -impl ExternalAgentServer for LocalClaudeCode { - fn get_command( - &mut self, - root_dir: Option<&str>, - extra_env: HashMap, - status_tx: Option>, - new_version_available_tx: Option>>, - cx: &mut AsyncApp, - ) -> Task)>> { - let fs = self.fs.clone(); - let node_runtime = self.node_runtime.clone(); - let project_environment = self.project_environment.downgrade(); - let custom_command = self.custom_command.clone(); - let settings_env = self.settings_env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - - cx.spawn(async move |cx| { - let mut env = project_environment - .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - root_dir.clone(), - cx, - ) - })? 
- .await - .unwrap_or_default(); - env.insert("ANTHROPIC_API_KEY".into(), "".into()); - - env.extend(settings_env.unwrap_or_default()); - - let (mut command, login_command) = if let Some(mut custom_command) = custom_command { - custom_command.env = Some(env); - (custom_command, None) - } else { - let mut command = get_or_npm_install_builtin_agent( - "claude-agent-acp".into(), - "@zed-industries/claude-agent-acp".into(), - "node_modules/@zed-industries/claude-agent-acp/dist/index.js".into(), - Some("0.17.0".parse().unwrap()), - status_tx, - new_version_available_tx, - fs, - node_runtime, - cx, - ) - .await?; - command.env = Some(env); - - (command, None) - }; - - command.env.get_or_insert_default().extend(extra_env); - Ok(( - command, - root_dir.to_string_lossy().into_owned(), - login_command, - )) - }) - } - - fn as_any_mut(&mut self) -> &mut dyn Any { - self - } -} - -struct LocalCodex { - fs: Arc, - project_environment: Entity, - http_client: Arc, - custom_command: Option, - settings_env: Option>, - no_browser: bool, -} - -impl ExternalAgentServer for LocalCodex { - fn get_command( - &mut self, - root_dir: Option<&str>, - extra_env: HashMap, - mut status_tx: Option>, - _new_version_available_tx: Option>>, - cx: &mut AsyncApp, - ) -> Task)>> { - let fs = self.fs.clone(); - let project_environment = self.project_environment.downgrade(); - let http = self.http_client.clone(); - let custom_command = self.custom_command.clone(); - let settings_env = self.settings_env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - let no_browser = self.no_browser; - - cx.spawn(async move |cx| { - let mut env = project_environment - .update(cx, |project_environment, cx| { - project_environment.local_directory_environment( - &Shell::System, - root_dir.clone(), - cx, - ) - })? 
- .await - .unwrap_or_default(); - if no_browser { - env.insert("NO_BROWSER".to_owned(), "1".to_owned()); - } - - env.extend(settings_env.unwrap_or_default()); - - let mut command = if let Some(mut custom_command) = custom_command { - custom_command.env = Some(env); - custom_command - } else { - let dir = paths::external_agents_dir().join(CODEX_NAME); - fs.create_dir(&dir).await?; - - let bin_name = if cfg!(windows) { - "codex-acp.exe" - } else { - "codex-acp" - }; - - let find_latest_local_version = async || -> Option { - let mut local_versions: Vec<(semver::Version, String)> = Vec::new(); - let mut stream = fs.read_dir(&dir).await.ok()?; - while let Some(entry) = stream.next().await { - let Ok(entry) = entry else { continue }; - let Some(file_name) = entry.file_name() else { - continue; - }; - let version_path = dir.join(&file_name); - if fs.is_file(&version_path.join(bin_name)).await { - let version_str = file_name.to_string_lossy(); - if let Ok(version) = - semver::Version::from_str(version_str.trim_start_matches('v')) - { - local_versions.push((version, version_str.into_owned())); - } - } - } - local_versions.sort_by(|(a, _), (b, _)| a.cmp(b)); - local_versions.last().map(|(_, v)| dir.join(v)) - }; - - let fallback_to_latest_local_version = - async |err: anyhow::Error| -> Result { - if let Some(local) = find_latest_local_version().await { - log::info!( - "Falling back to locally installed Codex version: {}", - local.display() - ); - Ok(local) - } else { - Err(err) - } - }; - - let version_dir = match ::http_client::github::latest_github_release( - CODEX_ACP_REPO, - true, - false, - http.clone(), - ) - .await - { - Ok(release) => { - let version_dir = dir.join(&release.tag_name); - if !fs.is_dir(&version_dir).await { - if let Some(ref mut status_tx) = status_tx { - status_tx.send("Installing…".into()).ok(); - } - - let tag = release.tag_name.clone(); - let version_number = tag.trim_start_matches('v'); - let asset_name = asset_name(version_number) - 
.context("codex acp is not supported for this architecture")?; - let asset = release - .assets - .into_iter() - .find(|asset| asset.name == asset_name) - .with_context(|| { - format!("no asset found matching `{asset_name:?}`") - })?; - // Strip "sha256:" prefix from digest if present (GitHub API format) - let digest = asset - .digest - .as_deref() - .and_then(|d| d.strip_prefix("sha256:").or(Some(d))); - match ::http_client::github_download::download_server_binary( - &*http, - &asset.browser_download_url, - digest, - &version_dir, - if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") { - AssetKind::Zip - } else { - AssetKind::TarGz - }, - ) - .await - { - Ok(()) => { - // remove older versions - util::fs::remove_matching(&dir, |entry| entry != version_dir) - .await; - version_dir - } - Err(err) => { - log::error!( - "Failed to download Codex release {}: {err:#}", - release.tag_name - ); - fallback_to_latest_local_version(err).await? - } - } - } else { - version_dir - } - } - Err(err) => { - log::error!("Failed to fetch Codex latest release: {err:#}"); - fallback_to_latest_local_version(err).await? 
- } - }; - - let bin_path = version_dir.join(bin_name); - anyhow::ensure!( - fs.is_file(&bin_path).await, - "Missing Codex binary at {} after installation", - bin_path.to_string_lossy() - ); - - let mut cmd = AgentServerCommand { - path: bin_path, - args: Vec::new(), - env: None, - }; - cmd.env = Some(env); - cmd - }; - - command.env.get_or_insert_default().extend(extra_env); - Ok((command, root_dir.to_string_lossy().into_owned(), None)) + Ok(AgentServerCommand { + path: command.program.into(), + args: command.args, + env: Some(command.env), + }) }) } @@ -1673,42 +990,6 @@ impl ExternalAgentServer for LocalCodex { } } -pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp"; - -fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> { - let arch = if cfg!(target_arch = "x86_64") { - "x86_64" - } else if cfg!(target_arch = "aarch64") { - "aarch64" - } else { - return None; - }; - - let platform = if cfg!(target_os = "macos") { - "apple-darwin" - } else if cfg!(target_os = "windows") { - "pc-windows-msvc" - } else if cfg!(target_os = "linux") { - "unknown-linux-gnu" - } else { - return None; - }; - - // Windows uses .zip in release assets - let ext = if cfg!(target_os = "windows") { - "zip" - } else { - "tar.gz" - }; - - Some((arch, platform, ext)) -} - -fn asset_name(version: &str) -> Option { - let (arch, platform, ext) = get_platform_info()?; - Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}")) -} - pub struct LocalExtensionArchiveAgent { pub fs: Arc, pub http_client: Arc, @@ -1723,12 +1004,11 @@ pub struct LocalExtensionArchiveAgent { impl ExternalAgentServer for LocalExtensionArchiveAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task> { let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1738,18 +1018,13 @@ impl 
ExternalAgentServer for LocalExtensionArchiveAgent { let targets = self.targets.clone(); let base_env = self.env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { // Get project environment let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? @@ -1800,12 +1075,10 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { // Use URL as version identifier for caching // Hash the URL to get a stable directory name - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let mut hasher = Sha256::new(); + hasher.update(archive_url.as_bytes()); + let url_hash = format!("{:x}", hasher.finalize()); + let version_dir = dir.join(format!("v_{}", url_hash)); if !fs.is_dir(&version_dir).await { // Determine SHA256 for verification @@ -1909,7 +1182,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { env: Some(env), }; - Ok((command, version_dir.to_string_lossy().into_owned(), None)) + Ok(command) }) } @@ -1931,12 +1204,11 @@ struct LocalRegistryArchiveAgent { impl ExternalAgentServer for LocalRegistryArchiveAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task> { let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1945,17 +1217,12 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { let targets = self.targets.clone(); let settings_env = self.env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| 
Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? @@ -2004,12 +1271,10 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { let archive_url = &target_config.archive; - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let mut hasher = Sha256::new(); + hasher.update(archive_url.as_bytes()); + let url_hash = format!("{:x}", hasher.finalize()); + let version_dir = dir.join(format!("v_{}", url_hash)); if !fs.is_dir(&version_dir).await { let sha256 = if let Some(provided_sha) = &target_config.sha256 { @@ -2099,7 +1364,7 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { env: Some(env), }; - Ok((command, version_dir.to_string_lossy().into_owned(), None)) + Ok(command) }) } @@ -2120,12 +1385,11 @@ struct LocalRegistryNpxAgent { impl ExternalAgentServer for LocalRegistryNpxAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task> { let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let package = self.package.clone(); @@ -2133,17 +1397,12 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { let distribution_env = self.distribution_env.clone(); let settings_env = self.settings_env.clone(); - let env_root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { 
project_environment.local_directory_environment( &Shell::System, - env_root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? @@ -2176,7 +1435,7 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { env: Some(env), }; - Ok((command, env_root_dir.to_string_lossy().into_owned(), None)) + Ok(command) }) } @@ -2193,24 +1452,19 @@ struct LocalCustomAgent { impl ExternalAgentServer for LocalCustomAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task> { let mut command = self.command.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); let project_environment = self.project_environment.downgrade(); cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -2219,7 +1473,7 @@ impl ExternalAgentServer for LocalCustomAgent { env.extend(command.env.unwrap_or_default()); env.extend(extra_env); command.env = Some(env); - Ok((command, root_dir.to_string_lossy().into_owned(), None)) + Ok(command) }) } @@ -2228,76 +1482,31 @@ impl ExternalAgentServer for LocalCustomAgent { } } -pub const GEMINI_NAME: &'static str = "gemini"; -pub const CLAUDE_AGENT_NAME: &'static str = "claude"; -pub const CODEX_NAME: &'static str = "codex"; +pub const GEMINI_NAME: &str = "gemini"; +pub const CLAUDE_AGENT_NAME: &str = "claude-acp"; +pub const CODEX_NAME: &str = "codex-acp"; #[derive(Default, Clone, JsonSchema, Debug, PartialEq, RegisterSetting)] -pub struct AllAgentServersSettings { - pub gemini: Option, - pub claude: Option, - pub codex: Option, - pub custom: HashMap, -} +pub struct AllAgentServersSettings(pub HashMap); -impl AllAgentServersSettings { - pub fn has_registry_agents(&self) -> bool { - self.custom - .values() - .any(|s| matches!(s, CustomAgentServerSettings::Registry { .. 
})) - } -} - -#[derive(Default, Clone, JsonSchema, Debug, PartialEq)] -pub struct BuiltinAgentServerSettings { - pub path: Option, - pub args: Option>, - pub env: Option>, - pub ignore_system_version: Option, - pub default_mode: Option, - pub default_model: Option, - pub favorite_models: Vec, - pub default_config_options: HashMap, - pub favorite_config_option_values: HashMap>, -} +impl std::ops::Deref for AllAgentServersSettings { + type Target = HashMap; -impl BuiltinAgentServerSettings { - fn custom_command(self) -> Option { - self.path.map(|path| AgentServerCommand { - path, - args: self.args.unwrap_or_default(), - // Settings env are always applied, so we don't need to supply them here as well - env: None, - }) + fn deref(&self) -> &Self::Target { + &self.0 } } -impl From for BuiltinAgentServerSettings { - fn from(value: settings::BuiltinAgentServerSettings) -> Self { - BuiltinAgentServerSettings { - path: value - .path - .map(|p| PathBuf::from(shellexpand::tilde(&p.to_string_lossy()).as_ref())), - args: value.args, - env: value.env, - ignore_system_version: value.ignore_system_version, - default_mode: value.default_mode, - default_model: value.default_model, - favorite_models: value.favorite_models, - default_config_options: value.default_config_options, - favorite_config_option_values: value.favorite_config_option_values, - } +impl std::ops::DerefMut for AllAgentServersSettings { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 } } -impl From for BuiltinAgentServerSettings { - fn from(value: AgentServerCommand) -> Self { - BuiltinAgentServerSettings { - path: Some(value.path), - args: Some(value.args), - env: value.env, - ..Default::default() - } +impl AllAgentServersSettings { + pub fn has_registry_agents(&self) -> bool { + self.values() + .any(|s| matches!(s, CustomAgentServerSettings::Registry { .. 
})) } } @@ -2541,15 +1750,12 @@ impl From for CustomAgentServerSettings { impl settings::Settings for AllAgentServersSettings { fn from_settings(content: &settings::SettingsContent) -> Self { let agent_settings = content.agent_servers.clone().unwrap(); - Self { - gemini: agent_settings.gemini.map(Into::into), - claude: agent_settings.claude.map(Into::into), - codex: agent_settings.codex.map(Into::into), - custom: agent_settings - .custom + Self( + agent_settings + .0 .into_iter() .map(|(k, v)| (k, v.into())) .collect(), - } + ) } } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 9faf80b7ac00002c005df3a3b1e0674dcdd4cc81..b9d1105ad02415699fa6a9bd1be8ec1f9c71271a 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -869,7 +869,6 @@ impl BufferStore { entry .insert( - // todo(lw): hot foreground spawn cx.spawn(async move |this, cx| { let load_result = load_buffer.await; this.update(cx, |this, _cx| { diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index e4cac4768d48db8aecf0b4499cce070c2c2c914c..88dc64fcbe8795ae4826dcaa2813744f525b9258 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -8,7 +8,7 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; -use futures::{FutureExt as _, future::join_all}; +use futures::{FutureExt as _, future::Either, future::join_all}; use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions}; use itertools::Itertools; use registry::ContextServerDescriptorRegistry; @@ -141,6 +141,8 @@ impl ContextServerConfiguration { worktree_store: Entity, cx: &AsyncApp, ) -> Option { + const EXTENSION_COMMAND_TIMEOUT: Duration = Duration::from_secs(30); + match settings { ContextServerSettings::Stdio { 
enabled: _, @@ -155,18 +157,27 @@ impl ContextServerConfiguration { let descriptor = cx.update(|cx| registry.read(cx).context_server_descriptor(&id.0))?; - match descriptor.command(worktree_store, cx).await { - Ok(command) => Some(ContextServerConfiguration::Extension { + let command_future = descriptor.command(worktree_store, cx); + let timeout_future = cx.background_executor().timer(EXTENSION_COMMAND_TIMEOUT); + + match futures::future::select(command_future, timeout_future).await { + Either::Left((Ok(command), _)) => Some(ContextServerConfiguration::Extension { command, settings, remote, }), - Err(e) => { + Either::Left((Err(e), _)) => { log::error!( "Failed to create context server configuration from settings: {e:#}" ); None } + Either::Right(_) => { + log::error!( + "Timed out resolving command for extension context server {id}" + ); + None + } } } ContextServerSettings::Http { @@ -855,6 +866,7 @@ impl ContextServerStore { this.update(cx, |this, cx| { this.populate_server_ids(cx); + cx.notify(); this.update_servers_task.take(); if this.needs_server_update { this.available_context_servers_changed(cx); @@ -959,11 +971,23 @@ impl ContextServerStore { })??; for (id, config) in servers_to_start { - let (server, config) = - Self::create_context_server(this.clone(), id, config, cx).await?; - this.update(cx, |this, cx| { - this.run_server(server, config, cx); - })?; + match Self::create_context_server(this.clone(), id.clone(), config, cx).await { + Ok((server, config)) => { + this.update(cx, |this, cx| { + this.run_server(server, config, cx); + })?; + } + Err(err) => { + log::error!("{id} context server failed to create: {err:#}"); + this.update(cx, |_this, cx| { + cx.emit(ServerStatusChangedEvent { + server_id: id, + status: ContextServerStatus::Error(err.to_string().into()), + }); + cx.notify(); + })?; + } + } } Ok(()) diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 
2430d6c1024c61bb9af984c914df9c308c4cb64f..a6c3f52b17a4a6cf241aa49329f3f14f0b5cefbc 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -2645,10 +2645,40 @@ impl Session { self.fetch( command, move |this, variables, cx| { - let Some(variables) = variables.log_err() else { + let Some(mut variables) = variables.log_err() else { return; }; + if this.adapter.0.as_ref() == "Debugpy" { + for variable in variables.iter_mut() { + if variable.type_ == Some("str".into()) { + // reverse Python repr() escaping + let mut unescaped = String::with_capacity(variable.value.len()); + let mut chars = variable.value.chars(); + while let Some(c) = chars.next() { + if c != '\\' { + unescaped.push(c); + } else { + match chars.next() { + Some('\\') => unescaped.push('\\'), + Some('n') => unescaped.push('\n'), + Some('t') => unescaped.push('\t'), + Some('r') => unescaped.push('\r'), + Some('\'') => unescaped.push('\''), + Some('"') => unescaped.push('"'), + Some(c) => { + unescaped.push('\\'); + unescaped.push(c); + } + None => {} + } + } + } + variable.value = unescaped; + } + } + } + this.active_snapshot .variables .insert(variables_reference, variables); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 1272a689b908413fff5eef71cf5e0e98fd72429b..eed16761974876247df2e5936f9db9fbdd8fafcc 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -6,6 +6,9 @@ pub mod pending_op; use crate::{ ProjectEnvironment, ProjectItem, ProjectPath, buffer_store::{BufferStore, BufferStoreEvent}, + trusted_worktrees::{ + PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore, + }, worktree_store::{WorktreeStore, WorktreeStoreEvent}, }; use anyhow::{Context as _, Result, anyhow, bail}; @@ -21,7 +24,7 @@ use futures::{ mpsc, oneshot::{self, Canceled}, }, - future::{self, Shared}, + future::{self, BoxFuture, Shared}, stream::FuturesOrdered, }; use git::{ @@ -36,8 +39,8 @@ use 
git::{ }, stash::{GitStash, StashEntry}, status::{ - DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, - UnmergedStatus, UnmergedStatusCode, + self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, + TreeDiffStatus, UnmergedStatus, UnmergedStatusCode, }, }; use gpui::{ @@ -60,7 +63,7 @@ use settings::WorktreeId; use smol::future::yield_now; use std::{ cmp::Ordering, - collections::{BTreeSet, HashSet, VecDeque}, + collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry}, future::Future, mem, ops::Range, @@ -72,7 +75,7 @@ use std::{ }, time::Instant, }; -use sum_tree::{Edit, SumTree, TreeSet}; +use sum_tree::{Edit, SumTree, TreeMap}; use task::Shell; use text::{Bias, BufferId}; use util::{ @@ -192,6 +195,7 @@ pub struct GitStoreCheckpoint { pub struct StatusEntry { pub repo_path: RepoPath, pub status: FileStatus, + pub diff_stat: Option, } impl StatusEntry { @@ -213,6 +217,8 @@ impl StatusEntry { repo_path: self.repo_path.to_proto(), simple_status, status: Some(status_to_proto(self.status)), + diff_stat_added: self.diff_stat.map(|ds| ds.added), + diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted), } } } @@ -223,7 +229,15 @@ impl TryFrom for StatusEntry { fn try_from(value: proto::StatusEntry) -> Result { let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?; let status = status_from_proto(value.simple_status, value.status)?; - Ok(Self { repo_path, status }) + let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) { + (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }), + _ => None, + }; + Ok(Self { + repo_path, + status, + diff_stat, + }) } } @@ -251,9 +265,8 @@ pub struct RepositoryId(pub u64); #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct MergeDetails { - pub conflicted_paths: TreeSet, + pub merge_heads_by_conflicted_path: TreeMap>>, pub message: Option, - pub heads: Vec>, } #[derive(Clone)] @@ -267,6 +280,11 
@@ pub struct RepositorySnapshot { pub id: RepositoryId, pub statuses_by_path: SumTree, pub work_directory_abs_path: Arc, + /// The working directory of the original repository. For a normal + /// checkout this equals `work_directory_abs_path`. For a git worktree + /// checkout, this is the original repo's working directory — used to + /// anchor new worktree creation so they don't nest. + pub original_repo_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, pub head_commit: Option, @@ -296,6 +314,19 @@ enum GraphCommitHandlerState { Closed, } +pub struct InitialGitGraphData { + fetch_task: Task<()>, + pub error: Option, + pub commit_data: Vec>, + pub commit_oid_to_index: HashMap, +} + +pub struct GraphDataResponse<'a> { + pub commits: &'a [Arc], + pub is_loading: bool, + pub error: Option, +} + pub struct Repository { this: WeakEntity, snapshot: RepositorySnapshot, @@ -311,13 +342,7 @@ pub struct Repository { askpass_delegates: Arc>>, latest_askpass_id: u64, repository_state: Shared>>, - pub initial_graph_data: HashMap< - (LogOrder, LogSource), - ( - Task>, - Vec>, - ), - >, + initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>, graph_commit_data_handler: GraphCommitHandlerState, commit_data: HashMap, } @@ -343,6 +368,7 @@ impl LocalRepositoryState { dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, cx: &mut AsyncApp, ) -> anyhow::Result { let environment = project_environment @@ -370,6 +396,7 @@ impl LocalRepositoryState { } }) .await?; + backend.set_trusted(is_trusted); Ok(LocalRepositoryState { backend, environment: Arc::new(environment), @@ -390,14 +417,20 @@ pub enum RepositoryState { Remote(RemoteRepositoryState), } +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum GitGraphEvent { + CountUpdated(usize), + FullyLoaded, + LoadingError, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, - MergeHeadsChanged, BranchChanged, StashEntriesChanged, PendingOpsChanged 
{ pending_ops: SumTree }, - GitGraphCountUpdated((LogOrder, LogSource), usize), + GraphEvent((LogSource, LogOrder), GitGraphEvent), } #[derive(Clone, Debug)] @@ -478,11 +511,15 @@ impl GitStore { state: GitStoreState, cx: &mut Context, ) -> Self { - let _subscriptions = vec![ + let mut _subscriptions = vec![ cx.subscribe(&worktree_store, Self::on_worktree_store_event), cx.subscribe(&buffer_store, Self::on_buffer_store_event), ]; + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event)); + } + GitStore { state, buffer_store, @@ -1492,19 +1529,30 @@ impl GitStore { new_work_directory_abs_path: Some(work_directory_abs_path), dot_git_abs_path: Some(dot_git_abs_path), repository_dir_abs_path: Some(_repository_dir_abs_path), - common_dir_abs_path: Some(_common_dir_abs_path), + common_dir_abs_path: Some(common_dir_abs_path), .. } = update { + let original_repo_abs_path: Arc = + git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into(); let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); + let is_trusted = TrustedWorktrees::try_get_global(cx) + .map(|trusted_worktrees| { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx) + }) + }) + .unwrap_or(false); let git_store = cx.weak_entity(); let repo = cx.new(|cx| { let mut repo = Repository::local( id, work_directory_abs_path.clone(), + original_repo_abs_path.clone(), dot_git_abs_path.clone(), project_environment.downgrade(), fs.clone(), + is_trusted, git_store, cx, ); @@ -1545,6 +1593,39 @@ impl GitStore { } } + fn on_trusted_worktrees_event( + &mut self, + _: Entity, + event: &TrustedWorktreesEvent, + cx: &mut Context, + ) { + if !matches!(self.state, GitStoreState::Local { .. 
}) { + return; + } + + let (is_trusted, event_paths) = match event { + TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths), + TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths), + }; + + for (repo_id, worktree_ids) in &self.worktree_ids { + if worktree_ids + .iter() + .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id))) + { + if let Some(repo) = self.repositories.get(repo_id) { + let repository_state = repo.read(cx).repository_state.clone(); + cx.background_spawn(async move { + if let Ok(RepositoryState::Local(state)) = repository_state.await { + state.backend.set_trusted(is_trusted); + } + }) + .detach(); + } + } + } + } + fn on_buffer_store_event( &mut self, _: Entity, @@ -1827,6 +1908,11 @@ impl GitStore { let id = RepositoryId::from_proto(update.id); let client = this.upstream_client().context("no upstream client")?; + let original_repo_abs_path: Option> = update + .original_repo_abs_path + .as_deref() + .map(|p| Path::new(p).into()); + let mut repo_subscription = None; let repo = this.repositories.entry(id).or_insert_with(|| { let git_store = cx.weak_entity(); @@ -1834,6 +1920,7 @@ impl GitStore { Repository::remote( id, Path::new(&update.abs_path).into(), + original_repo_abs_path.clone(), path_style, ProjectId(update.project_id), client, @@ -3429,10 +3516,17 @@ impl RepositoryId { } impl RepositorySnapshot { - fn empty(id: RepositoryId, work_directory_abs_path: Arc, path_style: PathStyle) -> Self { + fn empty( + id: RepositoryId, + work_directory_abs_path: Arc, + original_repo_abs_path: Option>, + path_style: PathStyle, + ) -> Self { Self { id, statuses_by_path: Default::default(), + original_repo_abs_path: original_repo_abs_path + .unwrap_or_else(|| work_directory_abs_path.clone()), work_directory_abs_path, branch: None, head_commit: None, @@ -3457,9 +3551,9 @@ impl RepositorySnapshot { removed_statuses: Default::default(), current_merge_conflicts: self .merge - .conflicted_paths 
+ .merge_heads_by_conflicted_path .iter() - .map(|repo_path| repo_path.to_proto()) + .map(|(repo_path, _)| repo_path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3476,6 +3570,9 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), } } @@ -3497,7 +3594,9 @@ impl RepositorySnapshot { current_new_entry = new_statuses.next(); } Ordering::Equal => { - if new_entry.status != old_entry.status { + if new_entry.status != old_entry.status + || new_entry.diff_stat != old_entry.diff_stat + { updated_statuses.push(new_entry.to_proto()); } current_old_entry = old_statuses.next(); @@ -3528,9 +3627,9 @@ impl RepositorySnapshot { removed_statuses, current_merge_conflicts: self .merge - .conflicted_paths + .merge_heads_by_conflicted_path .iter() - .map(|path| path.to_proto()) + .map(|(path, _)| path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3547,6 +3646,9 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), } } @@ -3564,6 +3666,12 @@ impl RepositorySnapshot { .cloned() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.statuses_by_path + .get(&PathKey(path.as_ref().clone()), ()) + .and_then(|entry| entry.diff_stat) + } + pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option { Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style) } @@ -3586,12 +3694,16 @@ impl RepositorySnapshot { } pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool { - 
self.merge.conflicted_paths.contains(repo_path) + self.merge + .merge_heads_by_conflicted_path + .contains_key(repo_path) } pub fn has_conflict(&self, repo_path: &RepoPath) -> bool { - let had_conflict_on_last_merge_head_change = - self.merge.conflicted_paths.contains(repo_path); + let had_conflict_on_last_merge_head_change = self + .merge + .merge_heads_by_conflicted_path + .contains_key(repo_path); let has_conflict_currently = self .status_for_path(repo_path) .is_some_and(|entry| entry.status.is_conflicted()); @@ -3630,13 +3742,13 @@ pub fn proto_to_stash(entry: &proto::StashEntry) -> Result { } impl MergeDetails { - async fn load( + async fn update( + &mut self, backend: &Arc, - status: &SumTree, - prev_snapshot: &RepositorySnapshot, - ) -> Result<(MergeDetails, bool)> { + current_conflicted_paths: Vec, + ) -> Result { log::debug!("load merge details"); - let message = backend.merge_message().await; + self.message = backend.merge_message().await.map(SharedString::from); let heads = backend .revparse_batch(vec![ "MERGE_HEAD".into(), @@ -3651,48 +3763,42 @@ impl MergeDetails { .into_iter() .map(|opt| opt.map(SharedString::from)) .collect::>(); - let merge_heads_changed = heads != prev_snapshot.merge.heads; - let conflicted_paths = if merge_heads_changed { - let current_conflicted_paths = TreeSet::from_ordered_entries( - status - .iter() - .filter(|entry| entry.status.is_conflicted()) - .map(|entry| entry.repo_path.clone()), - ); - // It can happen that we run a scan while a lengthy merge is in progress - // that will eventually result in conflicts, but before those conflicts - // are reported by `git status`. Since for the moment we only care about - // the merge heads state for the purposes of tracking conflicts, don't update - // this state until we see some conflicts. 
- if heads.iter().any(Option::is_some) - && !prev_snapshot.merge.heads.iter().any(Option::is_some) - && current_conflicted_paths.is_empty() - { - log::debug!("not updating merge heads because no conflicts found"); - return Ok(( - MergeDetails { - message: message.map(SharedString::from), - ..prev_snapshot.merge.clone() - }, - false, - )); + let mut conflicts_changed = false; + + // Record the merge state for newly conflicted paths + for path in ¤t_conflicted_paths { + if self.merge_heads_by_conflicted_path.get(&path).is_none() { + conflicts_changed = true; + self.merge_heads_by_conflicted_path + .insert(path.clone(), heads.clone()); } + } - current_conflicted_paths - } else { - prev_snapshot.merge.conflicted_paths.clone() - }; - let details = MergeDetails { - conflicted_paths, - message: message.map(SharedString::from), - heads, - }; - Ok((details, merge_heads_changed)) + // Clear state for paths that are no longer conflicted and for which the merge heads have changed + self.merge_heads_by_conflicted_path + .retain(|path, old_merge_heads| { + let keep = current_conflicted_paths.contains(path) + || (old_merge_heads == &heads + && old_merge_heads.iter().any(|head| head.is_some())); + if !keep { + conflicts_changed = true; + } + keep + }); + + Ok(conflicts_changed) } } impl Repository { + pub fn is_trusted(&self) -> bool { + match self.repository_state.peek() { + Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(), + _ => false, + } + } + pub fn snapshot(&self) -> RepositorySnapshot { self.snapshot.clone() } @@ -3714,14 +3820,20 @@ impl Repository { fn local( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Arc, dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = - RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local()); + let snapshot = RepositorySnapshot::empty( + id, + 
work_directory_abs_path.clone(), + Some(original_repo_abs_path), + PathStyle::local(), + ); let state = cx .spawn(async move |_, cx| { LocalRepositoryState::new( @@ -3729,6 +3841,7 @@ impl Repository { dot_git_abs_path, project_environment, fs, + is_trusted, cx, ) .await @@ -3744,7 +3857,7 @@ impl Repository { .shared(); cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::BranchChanged => { if this.scan_id > 1 { this.initial_graph_data.clear(); } @@ -3775,13 +3888,19 @@ impl Repository { fn remote( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Option>, path_style: PathStyle, project_id: ProjectId, client: AnyProtoClient, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style); + let snapshot = RepositorySnapshot::empty( + id, + work_directory_abs_path, + original_repo_abs_path, + path_style, + ); let repository_state = RemoteRepositoryState { project_id, client }; let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx); let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared(); @@ -4053,6 +4172,10 @@ impl Repository { self.snapshot.status() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.snapshot.diff_stat_for_path(path) + } + pub fn cached_stash(&self) -> GitStash { self.snapshot.stash_entries.clone() } @@ -4364,47 +4487,82 @@ impl Repository { }) } + pub fn get_graph_data( + &self, + log_source: LogSource, + log_order: LogOrder, + ) -> Option<&InitialGitGraphData> { + self.initial_graph_data.get(&(log_source, log_order)) + } + pub fn graph_data( &mut self, log_source: LogSource, log_order: LogOrder, range: Range, cx: &mut Context, - ) -> (&[Arc], bool) { - let (loading_task, initial_commit_data) = self + ) -> GraphDataResponse<'_> { + let initial_commit_data = self 
.initial_graph_data - .entry((log_order, log_source.clone())) + .entry((log_source.clone(), log_order)) .or_insert_with(|| { let state = self.repository_state.clone(); let log_source = log_source.clone(); - ( - cx.spawn(async move |repository, cx| { - let state = state.await; - match state { - Ok(RepositoryState::Local(LocalRepositoryState { - backend, .. - })) => { - Self::local_git_graph_data( - repository, backend, log_source, log_order, cx, - ) - .await - } - Ok(RepositoryState::Remote(_)) => { - Err("Git graph is not supported for collab yet".into()) - } - Err(e) => Err(SharedString::from(e)), + + let fetch_task = cx.spawn(async move |repository, cx| { + let state = state.await; + let result = match state { + Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => { + Self::local_git_graph_data( + repository.clone(), + backend, + log_source.clone(), + log_order, + cx, + ) + .await } - }), - vec![], - ) + Ok(RepositoryState::Remote(_)) => { + Err("Git graph is not supported for collab yet".into()) + } + Err(e) => Err(SharedString::from(e)), + }; + + if let Err(fetch_task_error) = result { + repository + .update(cx, |repository, _| { + if let Some(data) = repository + .initial_graph_data + .get_mut(&(log_source, log_order)) + { + data.error = Some(fetch_task_error); + } else { + debug_panic!( + "This task would be dropped if this entry doesn't exist" + ); + } + }) + .ok(); + } + }); + + InitialGitGraphData { + fetch_task, + error: None, + commit_data: Vec::new(), + commit_oid_to_index: HashMap::default(), + } }); - let max_start = initial_commit_data.len().saturating_sub(1); - let max_end = initial_commit_data.len(); - ( - &initial_commit_data[range.start.min(max_start)..range.end.min(max_end)], - !loading_task.is_ready(), - ) + let max_start = initial_commit_data.commit_data.len().saturating_sub(1); + let max_end = initial_commit_data.commit_data.len(); + + GraphDataResponse { + commits: &initial_commit_data.commit_data + 
[range.start.min(max_start)..range.end.min(max_end)], + is_loading: !initial_commit_data.fetch_task.is_ready(), + error: initial_commit_data.error.clone(), + } } async fn local_git_graph_data( @@ -4427,32 +4585,38 @@ impl Repository { } }); - let graph_data_key = (log_order, log_source.clone()); + let graph_data_key = (log_source, log_order); while let Ok(initial_graph_commit_data) = request_rx.recv().await { this.update(cx, |repository, cx| { let graph_data = repository .initial_graph_data - .get_mut(&graph_data_key) - .map(|(_, graph_data)| graph_data); - debug_assert!( - graph_data.is_some(), - "This task should be dropped if data doesn't exist" - ); + .entry(graph_data_key.clone()) + .and_modify(|graph_data| { + for commit_data in initial_graph_commit_data { + graph_data + .commit_oid_to_index + .insert(commit_data.sha, graph_data.commit_data.len()); + graph_data.commit_data.push(commit_data); + + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); + } + }); - if let Some(graph_data) = graph_data { - graph_data.extend(initial_graph_commit_data); - cx.emit(RepositoryEvent::GitGraphCountUpdated( - graph_data_key.clone(), - graph_data.len(), - )); + match &graph_data { + Entry::Occupied(_) => {} + Entry::Vacant(_) => { + debug_panic!("This task should be dropped if data doesn't exist"); + } } }) .ok(); } task.await?; - Ok(()) } @@ -4872,8 +5036,7 @@ impl Repository { .map(|repo_path| repo_path.to_proto()) .collect(), }) - .await - .context("sending stash request")?; + .await?; Ok(()) } } @@ -5082,8 +5245,7 @@ impl Repository { }), askpass_id, }) - .await - .context("sending commit request")?; + .await?; Ok(()) } @@ -5122,8 +5284,7 @@ impl Repository { askpass_id, remote: fetch_options.to_proto(), }) - .await - .context("sending fetch request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5224,8 +5385,7 @@ impl Repository { } as i32), }) - .await - .context("sending 
push request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5291,8 +5451,7 @@ impl Repository { branch_name: branch.as_ref().map(|b| b.to_string()), remote_name: remote.to_string(), }) - .await - .context("sending pull request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5571,6 +5730,24 @@ impl Repository { ) } + pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver> { + self.send_job( + Some(format!("git worktree remove: {}", path.display()).into()), + move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.remove_worktree(path, force).await + } + RepositoryState::Remote(_) => { + anyhow::bail!( + "Removing worktrees on remote repositories is not yet supported" + ) + } + } + }, + ) + } + pub fn default_branch( &mut self, include_remote_name: bool, @@ -5852,12 +6029,10 @@ impl Repository { update: proto::UpdateRepository, cx: &mut Context, ) -> Result<()> { - let conflicted_paths = TreeSet::from_ordered_entries( - update - .current_merge_conflicts - .into_iter() - .filter_map(|path| RepoPath::from_proto(&path).log_err()), - ); + if let Some(main_path) = &update.original_repo_abs_path { + self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into(); + } + let new_branch = update.branch_summary.as_ref().map(proto_to_branch); let new_head_commit = update .head_commit_details @@ -5869,7 +6044,17 @@ impl Repository { self.snapshot.branch = new_branch; self.snapshot.head_commit = new_head_commit; - self.snapshot.merge.conflicted_paths = conflicted_paths; + // We don't store any merge head state for downstream projects; the upstream + // will track it and we will just get the updated conflicts + let new_merge_heads = TreeMap::from_ordered_entries( + update + .current_merge_conflicts + .into_iter() + .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))), + ); + let conflicts_changed = + 
self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads; + self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads; self.snapshot.merge.message = update.merge_message.map(SharedString::from); let new_stash_entries = GitStash { entries: update @@ -5902,10 +6087,11 @@ impl Repository { }), ) .collect::>(); - if !edits.is_empty() { + if conflicts_changed || !edits.is_empty() { cx.emit(RepositoryEvent::StatusesChanged); } self.snapshot.statuses_by_path.edit(edits, ()); + if update.is_last_update { self.snapshot.scan_id = update.scan_id; } @@ -5989,17 +6175,16 @@ impl Repository { let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else { bail!("not a local repository") }; - let (snapshot, events) = this - .update(&mut cx, |this, _| { - this.paths_needing_status_update.clear(); - compute_snapshot( - this.id, - this.work_directory_abs_path.clone(), - this.snapshot.clone(), - backend.clone(), - ) - }) - .await?; + let compute_snapshot = this.update(&mut cx, |this, _| { + this.paths_needing_status_update.clear(); + compute_snapshot( + this.id, + this.work_directory_abs_path.clone(), + this.snapshot.clone(), + backend.clone(), + ) + }); + let (snapshot, events) = cx.background_spawn(compute_snapshot).await?; this.update(&mut cx, |this, cx| { this.snapshot = snapshot.clone(); this.clear_pending_ops(cx); @@ -6221,22 +6406,43 @@ impl Repository { return Ok(()); } + let has_head = prev_snapshot.head_commit.is_some(); + let stash_entries = backend.stash_entries().await?; let changed_path_statuses = cx .background_spawn(async move { let mut changed_paths = changed_paths.into_iter().flatten().collect::>(); - let statuses = backend - .status(&changed_paths.iter().cloned().collect::>()) - .await?; + let changed_paths_vec = changed_paths.iter().cloned().collect::>(); + + let status_task = backend.status(&changed_paths_vec); + let diff_stat_future = if has_head { + backend.diff_stat(&changed_paths_vec) + } else { + 
future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + + let (statuses, diff_stats) = + futures::future::try_join(status_task, diff_stat_future).await?; + + let diff_stats: HashMap = + HashMap::from_iter(diff_stats.entries.into_iter().cloned()); + let mut changed_path_statuses = Vec::new(); let prev_statuses = prev_snapshot.statuses_by_path.clone(); let mut cursor = prev_statuses.cursor::(()); for (repo_path, status) in &*statuses.entries { + let current_diff_stat = diff_stats.get(repo_path).copied(); + changed_paths.remove(repo_path); if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) - && cursor.item().is_some_and(|entry| entry.status == *status) + && cursor.item().is_some_and(|entry| { + entry.status == *status && entry.diff_stat == current_diff_stat + }) { continue; } @@ -6244,6 +6450,7 @@ impl Repository { changed_path_statuses.push(Edit::Insert(StatusEntry { repo_path: repo_path.clone(), status: *status, + diff_stat: current_diff_stat, })); } let mut cursor = prev_statuses.cursor::(()); @@ -6601,40 +6808,54 @@ async fn compute_snapshot( let mut events = Vec::new(); let branches = backend.branches().await?; let branch = branches.into_iter().find(|branch| branch.is_head); - let statuses = backend - .status(&[RepoPath::from_rel_path( + + // Useful when branch is None in detached head state + let head_commit = match backend.head_sha().await { + Some(head_sha) => backend.show(head_sha).await.log_err(), + None => None, + }; + + let diff_stat_future: BoxFuture<'_, Result> = if head_commit.is_some() { + backend.diff_stat(&[]) + } else { + future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + let (statuses, diff_stats) = futures::future::try_join( + backend.status(&[RepoPath::from_rel_path( &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), - )]) - .await?; + )]), + diff_stat_future, + ) + .await?; + + let diff_stat_map: HashMap<&RepoPath, DiffStat> = + 
diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect(); let stash_entries = backend.stash_entries().await?; + let mut conflicted_paths = Vec::new(); let statuses_by_path = SumTree::from_iter( - statuses - .entries - .iter() - .map(|(repo_path, status)| StatusEntry { + statuses.entries.iter().map(|(repo_path, status)| { + if status.is_conflicted() { + conflicted_paths.push(repo_path.clone()); + } + StatusEntry { repo_path: repo_path.clone(), status: *status, - }), + diff_stat: diff_stat_map.get(repo_path).copied(), + } + }), (), ); - let (merge_details, merge_heads_changed) = - MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?; - log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}"); + let mut merge_details = prev_snapshot.merge; + let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?; + log::debug!("new merge details: {merge_details:?}"); - if merge_heads_changed { - events.push(RepositoryEvent::MergeHeadsChanged); - } - - if statuses_by_path != prev_snapshot.statuses_by_path { + if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path { events.push(RepositoryEvent::StatusesChanged) } - // Useful when branch is None in detached head state - let head_commit = match backend.head_sha().await { - Some(head_sha) => backend.show(head_sha).await.log_err(), - None => None, - }; - if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit { events.push(RepositoryEvent::BranchChanged); } @@ -6646,6 +6867,7 @@ async fn compute_snapshot( id, statuses_by_path, work_directory_abs_path, + original_repo_abs_path: prev_snapshot.original_repo_abs_path, path_style: prev_snapshot.path_style, scan_id: prev_snapshot.scan_id + 1, branch, diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index bd94378433d7a8d992b913258999a6004b8031f2..67edd6c13ca5a850a99f28dee849718d9e7ec9ae 100644 --- a/crates/project/src/lsp_command.rs +++ 
b/crates/project/src/lsp_command.rs @@ -533,7 +533,7 @@ impl LspCommand for PerformRename { .rename_provider .is_some_and(|capability| match capability { OneOf::Left(enabled) => enabled, - OneOf::Right(_options) => true, + OneOf::Right(_) => true, }) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ad3d4bdb703548f86304ac6c3892f3cabab01caa..75f9702e12cf31ce4f555940d7d1918884bbc22a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -548,6 +548,7 @@ impl LocalLspStore { let mut initialization_options = Self::initialization_options_for_adapter( adapter.adapter.clone(), &delegate, + cx, ) .await?; @@ -3157,7 +3158,7 @@ impl LocalLspStore { .map(|edit| (range_from_lsp(edit.range), edit.new_text)) .collect::>(); - lsp_edits.sort_by_key(|(range, _)| (range.start, range.end)); + lsp_edits.sort_unstable_by_key(|(range, _)| (range.start, range.end)); let mut lsp_edits = lsp_edits.into_iter().peekable(); let mut edits = Vec::new(); @@ -3771,9 +3772,10 @@ impl LocalLspStore { async fn initialization_options_for_adapter( adapter: Arc, delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { let Some(mut initialization_config) = - adapter.clone().initialization_options(delegate).await? + adapter.clone().initialization_options(delegate, cx).await? else { return Ok(None); }; @@ -4999,10 +5001,6 @@ impl LspStore { }; let status = request.status(); - if !request.check_capabilities(language_server.adapter_server_capabilities()) { - return Task::ready(Ok(Default::default())); - } - let request_timeout = ProjectSettings::get_global(cx) .global_lsp_settings .get_request_timeout(); @@ -5104,6 +5102,10 @@ impl LspStore { .clone(); self.semantic_token_config .update_rules(new_semantic_token_rules); + // Always clear cached stylizers so that changes to language-specific + // semantic token rules (e.g. from extension install/uninstall) are + // picked up. Stylizers are recreated lazily, so this is cheap. 
+ self.semantic_token_config.clear_stylizers(); let new_global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens; @@ -7030,6 +7032,21 @@ impl LspStore { .collect() } else { for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() { + // When a server refresh was requested, other servers' cached hints + // are unaffected by the refresh and must be included in the result. + // Otherwise apply_fetched_hints (with should_invalidate()=true) + // removes all visible hints but only adds back the requesting + // server's new hints, permanently losing other servers' hints. + let other_servers_cached: CacheInlayHints = if lsp_refresh_requested { + lsp_data + .inlay_hints + .cached_hints(&chunk) + .cloned() + .unwrap_or_default() + } else { + HashMap::default() + }; + let next_hint_id = next_hint_id.clone(); let buffer = buffer.clone(); let query_version = query_version.clone(); @@ -7048,33 +7065,32 @@ impl LspStore { if update_cache { lsp_data.inlay_hints.invalidate_for_chunk(chunk); } - HashMap::default() + other_servers_cached } else { - new_hints_by_server - .into_iter() - .map(|(server_id, new_hints)| { - let new_hints = new_hints - .into_iter() - .map(|new_hint| { - ( - InlayId::Hint(next_hint_id.fetch_add( - 1, - atomic::Ordering::AcqRel, - )), - new_hint, - ) - }) - .collect::>(); - if update_cache { - lsp_data.inlay_hints.insert_new_hints( - chunk, - server_id, - new_hints.clone(), - ); - } - (server_id, new_hints) - }) - .collect() + let mut result = other_servers_cached; + for (server_id, new_hints) in new_hints_by_server { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + result.insert(server_id, new_hints); + } + result } }) }) @@ -11406,6 +11422,15 @@ impl LspStore { let 
buffer_id = buffer.remote_id(); if local.registered_buffers.contains_key(&buffer_id) { + let abs_path = file.abs_path(cx); + let uri = match lsp::Uri::from_file_path(&abs_path) { + Ok(uri) => uri, + Err(()) => { + log::error!("failed to convert path to URI: {:?}", abs_path); + continue; + } + }; + let versions = local .buffer_snapshots .entry(buffer_id) @@ -11427,14 +11452,13 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), version, initial_snapshot.text(), ); - buffer_paths_registered.push((buffer_id, file.abs_path(cx))); + buffer_paths_registered.push((buffer_id, abs_path)); local .buffers_opened_in_servers .entry(buffer_id) @@ -13964,6 +13988,7 @@ impl LspAdapter for SshLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { let Some(options) = &self.initialization_options else { return Ok(None); diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 270db67576f0a02155997757a01d489d44ef1766..9c284a143613c47aa3a5fcc9af5afac9d6dbbf4d 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -211,10 +211,10 @@ impl LspCommand for OpenDocs { _: &Arc, _: &App, ) -> Result { + let uri = lsp::Uri::from_file_path(path) + .map_err(|()| anyhow::anyhow!("{path:?} is not a valid URI"))?; Ok(OpenDocsParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Uri::from_file_path(path).unwrap(), - }, + text_document: lsp::TextDocumentIdentifier { uri }, position: point_to_lsp(self.position), }) } diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index 
516fb75eaae13752c235d0ad42db460740529c4d..cfcd74ad7de7baaf60833cd9db1085d60307c20e 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -12,8 +12,11 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedSt use language::{Buffer, LanguageName, language_settings::all_language_settings}; use lsp::{AdapterServerCapabilities, LanguageServerId}; use rpc::{TypedEnvelope, proto}; -use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore}; +use settings::{ + DefaultSemanticTokenRules, SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore, +}; use smol::future::yield_now; + use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped}; use util::ResultExt as _; @@ -58,6 +61,15 @@ impl SemanticTokenConfig { } } + /// Clears all cached stylizers. + /// + /// This is called when settings change to ensure that any modifications to + /// language-specific semantic token rules (e.g. from extension install/uninstall) + /// are picked up. Stylizers are recreated lazily on next use. 
+ pub(super) fn clear_stylizers(&mut self) { + self.stylizers.clear(); + } + pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool { if new_mode != self.global_mode { self.global_mode = new_mode; @@ -462,6 +474,7 @@ impl SemanticTokenStylizer { let global_rules = &ProjectSettings::get_global(cx) .global_lsp_settings .semantic_token_rules; + let default_rules = cx.global::(); let rules_by_token_type = token_types .iter() @@ -475,6 +488,7 @@ impl SemanticTokenStylizer { .rules .iter() .chain(language_rules.into_iter().flat_map(|lr| &lr.rules)) + .chain(default_rules.0.rules.iter()) .rev() .filter(filter) .cloned() @@ -653,8 +667,8 @@ impl ServerSemanticTokens { pub(crate) fn apply(&mut self, edits: &[SemanticTokensEdit]) { for edit in edits { - let start = edit.start as usize; - let end = start + edit.delete_count as usize; + let start = (edit.start as usize).min(self.data.len()); + let end = (start + edit.delete_count as usize).min(self.data.len()); self.data.splice(start..end, edit.data.iter().copied()); } } @@ -1000,4 +1014,38 @@ mod tests { ] ); } + + #[test] + fn applies_out_of_bounds_delta_edit_without_panic() { + let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0], None); + + // start beyond data length + tokens.apply(&[SemanticTokensEdit { + start: 100, + delete_count: 5, + data: vec![1, 2, 3, 4, 5], + }]); + assert_eq!( + tokens.data, + vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0, 1, 2, 3, 4, 5] + ); + + // delete_count extends past data length + let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3], None); + tokens.apply(&[SemanticTokensEdit { + start: 3, + delete_count: 100, + data: vec![9, 9], + }]); + assert_eq!(tokens.data, vec![2, 5, 3, 9, 9]); + + // empty data + let mut tokens = ServerSemanticTokens::from_full(Vec::new(), None); + tokens.apply(&[SemanticTokensEdit { + start: 0, + delete_count: 5, + data: vec![1, 2, 3, 4, 5], + }]); + assert_eq!(tokens.data, vec![1, 2, 3, 4, 5]); + 
} } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 9e37802213dfb8df5cf63af5648044ae8ec65ecb..756f095511a9688678df013458710e69d720c52e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1942,6 +1942,11 @@ impl Project { } } + #[cfg(feature = "test-support")] + pub fn client_subscriptions(&self) -> &Vec { + &self.client_subscriptions + } + #[cfg(feature = "test-support")] pub async fn example( root_paths: impl IntoIterator, @@ -2741,6 +2746,7 @@ impl Project { } = &mut self.client_state { *sharing_has_stopped = true; + self.client_subscriptions.clear(); self.collaborators.clear(); self.worktree_store.update(cx, |store, cx| { store.disconnected_from_host(cx); diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 75a3faf4f82d9e98e3c85a96222486cac217afd4..9258b16eef9f1c07cc44987f6608c2e0867c4154 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1407,35 +1407,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_tasks( - TaskSettingsLocation::Global(&file_path), - Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_tasks( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let 
Some(user_tasks_content) = user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_tasks( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok( file_path.clone() @@ -1459,35 +1462,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_debug_scenarios( - TaskSettingsLocation::Global(&file_path), - Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_debug_scenarios( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_debug_scenarios( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(Ok( file_path.clone(), diff --git 
a/crates/project/tests/integration/ext_agent_tests.rs b/crates/project/tests/integration/ext_agent_tests.rs index 74f762981a4f15f6d3d528e45374f542f30fa5ec..f3c398a619a81ee81146de16f8e58b1093569e8a 100644 --- a/crates/project/tests/integration/ext_agent_tests.rs +++ b/crates/project/tests/integration/ext_agent_tests.rs @@ -9,21 +9,16 @@ struct NoopExternalAgent; impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, - _root_dir: Option<&str>, _extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, - ) -> Task)>> { - Task::ready(Ok(( - AgentServerCommand { - path: PathBuf::from("noop"), - args: Vec::new(), - env: None, - }, - "".to_string(), - None, - ))) + ) -> Task> { + Task::ready(Ok(AgentServerCommand { + path: PathBuf::from("noop"), + args: Vec::new(), + env: None, + })) } fn as_any_mut(&mut self) -> &mut dyn Any { diff --git a/crates/project/tests/integration/extension_agent_tests.rs b/crates/project/tests/integration/extension_agent_tests.rs index f237b9dc7deaf220fbed8fd3ff6f7c8cec99898d..eff41a99cab878336206f232450f3c1b490d1fc8 100644 --- a/crates/project/tests/integration/extension_agent_tests.rs +++ b/crates/project/tests/integration/extension_agent_tests.rs @@ -25,21 +25,16 @@ struct NoopExternalAgent; impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, - _root_dir: Option<&str>, _extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, - ) -> Task)>> { - Task::ready(Ok(( - AgentServerCommand { - path: PathBuf::from("noop"), - args: Vec::new(), - env: None, - }, - "".to_string(), - None, - ))) + ) -> Task> { + Task::ready(Ok(AgentServerCommand { + path: PathBuf::from("noop"), + args: Vec::new(), + env: None, + })) } fn as_any_mut(&mut self) -> &mut dyn Any { @@ -301,26 +296,6 @@ async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) { #[test] fn test_tilde_expansion_in_settings() { - let settings = 
settings::BuiltinAgentServerSettings { - path: Some(PathBuf::from("~/bin/agent")), - args: Some(vec!["--flag".into()]), - env: None, - ignore_system_version: None, - default_mode: None, - default_model: None, - favorite_models: vec![], - default_config_options: Default::default(), - favorite_config_option_values: Default::default(), - }; - - let BuiltinAgentServerSettings { path, .. } = settings.into(); - - let path = path.unwrap(); - assert!( - !path.to_string_lossy().starts_with("~"), - "Tilde should be expanded for builtin agent path" - ); - let settings = settings::CustomAgentServerSettings::Custom { path: PathBuf::from("~/custom/agent"), args: vec!["serve".into()], diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 43704953e0d0bd3e81b9b63b5a797934970dcafa..82e92bc4f1cfb606fb09d5efd5d341ed2951c067 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -336,7 +336,7 @@ mod conflict_set_tests { second_head: UnmergedStatusCode::Updated, }, ); - // Cause the repository to emit MergeHeadsChanged. 
+ // Cause the repository to update cached conflicts state.refs.insert("MERGE_HEAD".into(), "123".into()) }) .unwrap(); @@ -461,6 +461,168 @@ mod conflict_set_tests { assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); }); } + + #[gpui::test] + async fn test_conflict_updates_with_delayed_merge_head_conflicts( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + zlog::init_test(); + cx.update(|cx| { + settings::init(cx); + }); + + let initial_text = " + one + two + three + four + " + .unindent(); + + let conflicted_text = " + one + <<<<<<< HEAD + two + ======= + TWO + >>>>>>> branch + three + four + " + .unindent(); + + let resolved_text = " + one + TWO + three + four + " + .unindent(); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": initial_text, + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let (git_store, buffer) = project.update(cx, |project, cx| { + ( + project.git_store().clone(), + project.open_local_buffer(path!("/project/a.txt"), cx), + ) + }); + let buffer = buffer.await.unwrap(); + let conflict_set = git_store.update(cx, |git_store, cx| { + git_store.open_conflict_set(buffer.clone(), cx) + }); + + let (events_tx, events_rx) = mpsc::channel::(); + let _conflict_set_subscription = cx.update(|cx| { + cx.subscribe(&conflict_set, move |_, event, _| { + events_tx.send(event.clone()).ok(); + }) + }); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("conflict set should start empty"); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.refs.insert("MERGE_HEAD".into(), "123".into()) + }) + .unwrap(); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("merge head without conflicted paths should not publish conflicts"); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + 
}); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(conflicted_text.clone(), cx); + }); + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.insert( + repo_path("a.txt"), + UnmergedStatus { + first_head: UnmergedStatusCode::Updated, + second_head: UnmergedStatusCode::Updated, + }, + ); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("conflicts should appear once conflicted paths are visible"); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..1); + conflict_set.update(cx, |conflict_set, cx| { + assert!(conflict_set.has_conflict); + let conflict_range = conflict_set.snapshot().conflicts[0] + .range + .to_point(buffer.read(cx)); + assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); + }); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(resolved_text.clone(), cx); + }); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("resolved buffer text should clear visible conflict markers"); + assert_eq!(update.old_range, 0..1); + assert_eq!(update.new_range, 0..0); + conflict_set.update(cx, |conflict_set, _| { + assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.refs.insert("MERGE_HEAD".into(), "456".into()); + }) + .unwrap(); + + cx.run_until_parked(); + events_rx.try_recv().expect_err( + "merge-head change without unmerged-path changes should not emit marker updates", + ); + conflict_set.update(cx, |conflict_set, _| { + assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.remove(&repo_path("a.txt")); + state.refs.remove("MERGE_HEAD"); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx.try_recv().expect( + "status catch-up should emit a no-op 
update when clearing stale conflict state", + ); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..0); + assert!(update.buffer_range.is_none()); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + } } mod git_traversal { @@ -1012,3 +1174,327 @@ mod git_traversal { pretty_assertions::assert_eq!(found_statuses, expected_statuses); } } + +mod git_worktrees { + use std::path::PathBuf; + + use fs::FakeFs; + use gpui::TestAppContext; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + fn init_test(cx: &mut gpui::TestAppContext) { + zlog::init_test(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_git_worktrees_list_and_create(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/root"))); + + let worktree_directory = PathBuf::from(path!("/root")); + cx.update(|cx| { + repository.update(cx, |repository, _| { + repository.create_worktree( + "feature-branch".to_string(), + worktree_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + .await + .unwrap() + .unwrap(); + + cx.executor().run_until_parked(); + + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + 
.unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/root"))); + assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch")); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + cx.update(|cx| { + repository.update(cx, |repository, _| { + repository.create_worktree( + "bugfix-branch".to_string(), + worktree_directory.clone(), + None, + ) + }) + }) + .await + .unwrap() + .unwrap(); + + cx.executor().run_until_parked(); + + // List worktrees — should now have main + two created + let worktrees = cx + .update(|cx| repository.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 3); + + let feature_worktree = worktrees + .iter() + .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch") + .expect("should find feature-branch worktree"); + assert_eq!( + feature_worktree.path, + worktree_directory.join("feature-branch") + ); + + let bugfix_worktree = worktrees + .iter() + .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/bugfix-branch") + .expect("should find bugfix-branch worktree"); + assert_eq!( + bugfix_worktree.path, + worktree_directory.join("bugfix-branch") + ); + assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha"); + } + + use crate::Project; +} + +mod trust_tests { + use collections::HashSet; + use fs::FakeFs; + use gpui::TestAppContext; + use project::trusted_worktrees::*; + + use serde_json::json; + use settings::SettingsStore; + use util::path; + + use crate::Project; + + fn init_test(cx: &mut TestAppContext) { + zlog::init_test(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_repository_defaults_to_untrusted_without_trust_system(cx: &mut TestAppContext) { + init_test(cx); + let fs = 
FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + // Create project without trust system — repos should default to untrusted. + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should default to untrusted when no trust system is initialized" + ); + }); + } + + #[gpui::test] + async fn test_multiple_repos_trust_with_single_worktree(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + "sub": { + ".git": {}, + "b.txt": "world", + }, + }), + ) + .await; + + cx.update(|cx| { + init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repos = project.read_with(cx, |project, cx| { + project + .repositories(cx) + .values() + .cloned() + .collect::>() + }); + assert_eq!(repos.len(), 2, "should have two repositories"); + for repo in &repos { + repo.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "all repos should be untrusted initially" + ); + }); + } + + let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + 
cx.executor().run_until_parked(); + + for repo in &repos { + repo.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "all repos should be trusted after worktree is trusted" + ); + }); + } + } + + #[gpui::test] + async fn test_repository_trust_restrict_trust_cycle(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + cx.update(|cx| { + project::trusted_worktrees::init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!(!repo.is_trusted(), "repository should start untrusted"); + }); + + let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted after worktree is trusted" + ); + }); + + trusted_worktrees.update(cx, |store, cx| { + store.restrict( + worktree_store.downgrade(), + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should be untrusted after worktree is restricted" + ); + }); + + 
trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted again after second trust" + ); + }); + } +} diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 9bd0be45ae3fa1e66e8af2c43657ba039045ecef..d86b969e61ed173ee314cde6f584f2dbab6859f9 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -31,7 +31,7 @@ use futures::{StreamExt, future}; use git::{ GitHostingProviderRegistry, repository::{RepoPath, repo_path}, - status::{FileStatus, StatusCode, TrackedStatus}, + status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, }; use git2::RepositoryInitOptions; use gpui::{ @@ -5359,6 +5359,52 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { }); } +#[cfg(target_os = "linux")] +#[gpui::test(retries = 5)] +async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let dir = TempTree::new(json!({})); + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await; + let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); + + tree.flush_fs_events(cx).await; + + let repro_dir = dir.path().join("repro"); + std::fs::create_dir(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::remove_dir_all(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none()); + }); + + std::fs::create_dir(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + 
cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::write(repro_dir.join("repro-marker"), "").unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!( + tree.read(cx) + .entry_for_path(rel_path("repro/repro-marker")) + .is_some() + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -9207,14 +9253,23 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ -9236,18 +9291,31 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("c.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ -9281,6 +9349,10 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { [StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }] ); }); @@ -9345,6 +9417,7 @@ async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) { worktree_status: StatusCode::Added } .into(), + diff_stat: None, }] ) 
}); @@ -9547,6 +9620,10 @@ async fn test_repository_pending_ops_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9653,6 +9730,10 @@ async fn test_repository_pending_ops_long_running_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9777,10 +9858,12 @@ async fn test_repository_pending_ops_stage_all( StatusEntry { repo_path: repo_path("a.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, ] ); @@ -10409,10 +10492,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { assert_eq!( repository_updates.lock().drain(..).collect::>(), - vec![ - RepositoryEvent::StatusesChanged, - RepositoryEvent::MergeHeadsChanged, - ], + vec![RepositoryEvent::StatusesChanged,], "Initial worktree scan should produce a repo update event" ); assert_eq!( @@ -10579,7 +10659,6 @@ async fn test_odd_events_for_ignored_dirs( assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::MergeHeadsChanged, RepositoryEvent::BranchChanged, RepositoryEvent::StatusesChanged, RepositoryEvent::StatusesChanged, diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6c0c10c0715a35de25efaa7f6fddbcb5c0257934..d647676834e9847ac697f1b51fc61bc1b2425adf 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -46,6 +46,7 @@ use settings::{ update_settings_file, }; use smallvec::SmallVec; +use std::ops::Neg; use std::{any::TypeId, time::Instant}; use std::{ cell::OnceCell, @@ -146,6 +147,7 @@ pub struct ProjectPanel { width: Option, pending_serialization: Task>, diagnostics: HashMap<(WorktreeId, Arc), DiagnosticSeverity>, + diagnostic_counts: HashMap<(WorktreeId, Arc), DiagnosticCount>, 
diagnostic_summary_update: Task<()>, // We keep track of the mouse down state on entries so we don't flash the UI // in case a user clicks to open a file. @@ -232,6 +234,30 @@ enum ClipboardEntry { Cut(BTreeSet), } +#[derive(Debug, Default, PartialEq, Eq, Clone, Copy)] +struct DiagnosticCount { + error_count: usize, + warning_count: usize, +} + +impl DiagnosticCount { + fn capped_error_count(&self) -> String { + Self::capped_count(self.error_count) + } + + fn capped_warning_count(&self) -> String { + Self::capped_count(self.warning_count) + } + + fn capped_count(count: usize) -> String { + if count > 99 { + "99+".to_string() + } else { + count.to_string() + } + } +} + #[derive(Debug, PartialEq, Eq, Clone)] struct EntryDetails { filename: String, @@ -249,6 +275,7 @@ struct EntryDetails { sticky: Option, filename_text_color: Color, diagnostic_severity: Option, + diagnostic_count: Option, git_status: GitSummary, is_private: bool, worktree_id: WorktreeId, @@ -847,6 +874,7 @@ impl ProjectPanel { width: None, pending_serialization: Task::ready(None), diagnostics: Default::default(), + diagnostic_counts: Default::default(), diagnostic_summary_update: Task::ready(()), scroll_handle, mouse_down: false, @@ -1029,6 +1057,26 @@ impl ProjectPanel { }); } self.diagnostics = diagnostics; + + let diagnostic_badges = ProjectPanelSettings::get_global(cx).diagnostic_badges; + self.diagnostic_counts = + if diagnostic_badges && show_diagnostics_setting != ShowDiagnostics::Off { + self.project.read(cx).diagnostic_summaries(false, cx).fold( + HashMap::default(), + |mut counts, (project_path, _, summary)| { + let entry = counts + .entry((project_path.worktree_id, project_path.path)) + .or_default(); + entry.error_count += summary.error_count; + if show_diagnostics_setting == ShowDiagnostics::All { + entry.warning_count += summary.warning_count; + } + counts + }, + ) + } else { + Default::default() + }; } fn update_strongest_diagnostic_severity( @@ -5044,6 +5092,7 @@ impl ProjectPanel { 
let filename_text_color = details.filename_text_color; let diagnostic_severity = details.diagnostic_severity; + let diagnostic_count = details.diagnostic_count; let item_colors = get_item_color(is_sticky, cx); let canonical_path = details @@ -5482,22 +5531,55 @@ impl ProjectPanel { ProjectPanelEntrySpacing::Standard => ListItemSpacing::ExtraDense, }) .selectable(false) - .when_some(canonical_path, |this, path| { - this.end_slot::( - div() - .id("symlink_icon") - .pr_3() - .tooltip(move |_window, cx| { - Tooltip::with_meta(path.to_string(), None, "Symbolic Link", cx) - }) - .child( - Icon::new(IconName::ArrowUpRight) - .size(IconSize::Indicator) - .color(filename_text_color), - ) - .into_any_element(), - ) - }) + .when( + canonical_path.is_some() || diagnostic_count.is_some(), + |this| { + let symlink_element = canonical_path.map(|path| { + div() + .id("symlink_icon") + .tooltip(move |_window, cx| { + Tooltip::with_meta( + path.to_string(), + None, + "Symbolic Link", + cx, + ) + }) + .child( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Indicator) + .color(filename_text_color), + ) + }); + this.end_slot::( + h_flex() + .gap_1() + .flex_none() + .pr_3() + .when_some(diagnostic_count, |this, count| { + this.when(count.error_count > 0, |this| { + this.child( + Label::new(count.capped_error_count()) + .size(LabelSize::Small) + .color(Color::Error), + ) + }) + .when( + count.warning_count > 0, + |this| { + this.child( + Label::new(count.capped_warning_count()) + .size(LabelSize::Small) + .color(Color::Warning), + ) + }, + ) + }) + .when_some(symlink_element, |this, el| this.child(el)) + .into_any_element(), + ) + }, + ) .child(if let Some(icon) = &icon { if let Some((_, decoration_color)) = entry_diagnostic_aware_icon_decoration_and_color(diagnostic_severity) @@ -5907,6 +5989,11 @@ impl ProjectPanel { .get(&(worktree_id, entry.path.clone())) .cloned(); + let diagnostic_count = self + .diagnostic_counts + .get(&(worktree_id, entry.path.clone())) + .copied(); + let 
filename_text_color = entry_git_aware_label_color(git_status, entry.is_ignored, is_marked); @@ -5931,6 +6018,7 @@ impl ProjectPanel { sticky, filename_text_color, diagnostic_severity, + diagnostic_count, git_status, is_private: entry.is_private, worktree_id, @@ -5966,12 +6054,27 @@ impl ProjectPanel { .worktree_for_entry(entry_id, cx) .context("can't reveal a non-existent entry in the project panel")?; let worktree = worktree.read(cx); - if skip_ignored - && worktree - .entry_for_id(entry_id) - .is_none_or(|entry| entry.is_ignored && !entry.is_always_included) - { - anyhow::bail!("can't reveal an ignored entry in the project panel"); + let worktree_id = worktree.id(); + let is_ignored = worktree + .entry_for_id(entry_id) + .is_none_or(|entry| entry.is_ignored && !entry.is_always_included); + if skip_ignored && is_ignored { + if self.index_for_entry(entry_id, worktree_id).is_none() { + anyhow::bail!("can't reveal an ignored entry in the project panel"); + } + + self.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + self.marked_entries.clear(); + self.marked_entries.push(SelectedEntry { + worktree_id, + entry_id, + }); + self.autoscroll(cx); + cx.notify(); + return Ok(()); } let is_active_item_file_diff_view = self .workspace @@ -5983,7 +6086,6 @@ impl ProjectPanel { return Ok(()); } - let worktree_id = worktree.id(); self.expand_entry(worktree_id, entry_id, cx); self.update_visible_entries(Some((worktree_id, entry_id)), false, true, window, cx); self.marked_entries.clear(); @@ -6356,11 +6458,14 @@ impl Render for ProjectPanel { el.on_action(cx.listener(Self::trash)) }) }) - .when(project.is_local(), |el| { - el.on_action(cx.listener(Self::reveal_in_finder)) - .on_action(cx.listener(Self::open_system)) - .on_action(cx.listener(Self::open_in_terminal)) - }) + .when( + project.is_local() || project.is_via_wsl_with_host_interop(cx), + |el| { + el.on_action(cx.listener(Self::reveal_in_finder)) + .on_action(cx.listener(Self::open_system)) + 
.on_action(cx.listener(Self::open_in_terminal)) + }, + ) .when(project.is_via_remote_server(), |el| { el.on_action(cx.listener(Self::open_in_terminal)) .on_action(cx.listener(Self::download_from_remote)) @@ -6587,6 +6692,24 @@ impl Render for ProjectPanel { .id("project-panel-blank-area") .block_mouse_except_scroll() .flex_grow() + .on_scroll_wheel({ + let scroll_handle = self.scroll_handle.clone(); + let entity_id = cx.entity().entity_id(); + move |event, window, cx| { + let state = scroll_handle.0.borrow(); + let base_handle = &state.base_handle; + let current_offset = base_handle.offset(); + let max_offset = base_handle.max_offset(); + let delta = event.delta.pixel_delta(window.line_height()); + let new_offset = (current_offset + delta) + .clamp(&max_offset.neg(), &Point::default()); + + if new_offset != current_offset { + base_handle.set_offset(new_offset); + cx.notify(entity_id); + } + } + }) .when( self.drag_target_entry.as_ref().is_some_and( |entry| match entry { @@ -6756,14 +6879,17 @@ impl Render for ProjectPanel { Button::new("open_project", "Open Project") .full_width() .key_binding(KeyBinding::for_action_in( - &workspace::Open, + &workspace::Open::default(), &focus_handle, cx, )) .on_click(cx.listener(|this, _, window, cx| { this.workspace .update(cx, |_, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx); + window.dispatch_action( + workspace::Open::default().boxed_clone(), + cx, + ); }) .log_err(); })), diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 6b6b7a377276a9fb8b812e495a07a6c4c7aac15e..0d703c55c06dfff2976fe59f6e030ad9eb1d758b 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -35,6 +35,7 @@ pub struct ProjectPanelSettings { pub drag_and_drop: bool, pub auto_open: AutoOpenSettings, pub sort_mode: ProjectPanelSortMode, + pub diagnostic_badges: bool, } #[derive(Copy, Clone, Debug, 
Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -125,9 +126,8 @@ impl Settings for ProjectPanelSettings { on_drop: auto_open.on_drop.unwrap(), } }, - sort_mode: project_panel - .sort_mode - .unwrap_or(ProjectPanelSortMode::DirectoriesFirst), + sort_mode: project_panel.sort_mode.unwrap(), + diagnostic_badges: project_panel.diagnostic_badges.unwrap(), } } } diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index 01d165174784f4ab5360b99e16a514a4b8f669b4..af84a7f522a60abf2608bf1f3435b367d24f6bdc 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ b/crates/project_panel/src/project_panel_tests.rs @@ -4843,6 +4843,64 @@ async fn test_autoreveal_and_gitignored_files(cx: &mut gpui::TestAppContext) { ], "When a gitignored entry is explicitly revealed, it should be shown in the project tree" ); + + panel.update(cx, |panel, cx| { + panel.project.update(cx, |_, cx| { + cx.emit(project::Event::ActiveEntryChanged(Some(dir_2_file))) + }) + }); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v project_root", + " > .git", + " v dir_1", + " v gitignored_dir", + " file_a.py", + " file_b.py", + " file_c.py", + " file_1.py", + " file_2.py", + " file_3.py", + " v dir_2", + " file_1.py <== selected <== marked", + " file_2.py", + " file_3.py", + " .gitignore", + ], + "After switching to dir_2_file, it should be selected and marked" + ); + + panel.update(cx, |panel, cx| { + panel.project.update(cx, |_, cx| { + cx.emit(project::Event::ActiveEntryChanged(Some( + gitignored_dir_file, + ))) + }) + }); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v project_root", + " > .git", + " v dir_1", + " v gitignored_dir", + " file_a.py <== selected <== marked", + " file_b.py", + " file_c.py", + " file_1.py", + " file_2.py", + " file_3.py", + " v dir_2", + " file_1.py", + " file_2.py", + " file_3.py", + " .gitignore", + ], + 
"When a gitignored entry is already visible, auto reveal should mark it as selected" + ); } #[gpui::test] diff --git a/crates/proto/proto/ai.proto b/crates/proto/proto/ai.proto index b2a8a371c4422e80ad5edd677f2b75288f69ebd4..428d971c536f6e830e0c056372d311dc7ed7028f 100644 --- a/crates/proto/proto/ai.proto +++ b/crates/proto/proto/ai.proto @@ -5,245 +5,245 @@ import "buffer.proto"; import "task.proto"; message Context { - repeated ContextOperation operations = 1; + repeated ContextOperation operations = 1; } message ContextMetadata { - string context_id = 1; - optional string summary = 2; + string context_id = 1; + optional string summary = 2; } message ContextMessageStatus { - oneof variant { - Done done = 1; - Pending pending = 2; - Error error = 3; - Canceled canceled = 4; - } + oneof variant { + Done done = 1; + Pending pending = 2; + Error error = 3; + Canceled canceled = 4; + } - message Done {} + message Done {} - message Pending {} + message Pending {} - message Error { - string message = 1; - } + message Error { + string message = 1; + } - message Canceled {} + message Canceled {} } message ContextMessage { - LamportTimestamp id = 1; - Anchor start = 2; - LanguageModelRole role = 3; - ContextMessageStatus status = 4; + LamportTimestamp id = 1; + Anchor start = 2; + LanguageModelRole role = 3; + ContextMessageStatus status = 4; } message SlashCommandOutputSection { - AnchorRange range = 1; - string icon_name = 2; - string label = 3; - optional string metadata = 4; + AnchorRange range = 1; + string icon_name = 2; + string label = 3; + optional string metadata = 4; } message ThoughtProcessOutputSection { - AnchorRange range = 1; + AnchorRange range = 1; } message ContextOperation { - oneof variant { - InsertMessage insert_message = 1; - UpdateMessage update_message = 2; - UpdateSummary update_summary = 3; - BufferOperation buffer_operation = 5; - SlashCommandStarted slash_command_started = 6; - SlashCommandOutputSectionAdded slash_command_output_section_added 
= 7; - SlashCommandCompleted slash_command_completed = 8; - ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9; - } - - reserved 4; - - message InsertMessage { - ContextMessage message = 1; - repeated VectorClockEntry version = 2; - } - - message UpdateMessage { - LamportTimestamp message_id = 1; - LanguageModelRole role = 2; - ContextMessageStatus status = 3; - LamportTimestamp timestamp = 4; - repeated VectorClockEntry version = 5; - } - - message UpdateSummary { - string summary = 1; - bool done = 2; - LamportTimestamp timestamp = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandStarted { - LamportTimestamp id = 1; - AnchorRange output_range = 2; - string name = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandOutputSectionAdded { - LamportTimestamp timestamp = 1; - SlashCommandOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - message SlashCommandCompleted { - LamportTimestamp id = 1; - LamportTimestamp timestamp = 3; - optional string error_message = 4; - repeated VectorClockEntry version = 5; - } - - message ThoughtProcessOutputSectionAdded { - LamportTimestamp timestamp = 1; - ThoughtProcessOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - message BufferOperation { - Operation operation = 1; - } + oneof variant { + InsertMessage insert_message = 1; + UpdateMessage update_message = 2; + UpdateSummary update_summary = 3; + BufferOperation buffer_operation = 5; + SlashCommandStarted slash_command_started = 6; + SlashCommandOutputSectionAdded slash_command_output_section_added = 7; + SlashCommandCompleted slash_command_completed = 8; + ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9; + } + + reserved 4; + + message InsertMessage { + ContextMessage message = 1; + repeated VectorClockEntry version = 2; + } + + message UpdateMessage { + LamportTimestamp message_id = 1; + LanguageModelRole role = 2; + ContextMessageStatus 
status = 3; + LamportTimestamp timestamp = 4; + repeated VectorClockEntry version = 5; + } + + message UpdateSummary { + string summary = 1; + bool done = 2; + LamportTimestamp timestamp = 3; + repeated VectorClockEntry version = 4; + } + + message SlashCommandStarted { + LamportTimestamp id = 1; + AnchorRange output_range = 2; + string name = 3; + repeated VectorClockEntry version = 4; + } + + message SlashCommandOutputSectionAdded { + LamportTimestamp timestamp = 1; + SlashCommandOutputSection section = 2; + repeated VectorClockEntry version = 3; + } + + message SlashCommandCompleted { + LamportTimestamp id = 1; + LamportTimestamp timestamp = 3; + optional string error_message = 4; + repeated VectorClockEntry version = 5; + } + + message ThoughtProcessOutputSectionAdded { + LamportTimestamp timestamp = 1; + ThoughtProcessOutputSection section = 2; + repeated VectorClockEntry version = 3; + } + + message BufferOperation { + Operation operation = 1; + } } message AdvertiseContexts { - uint64 project_id = 1; - repeated ContextMetadata contexts = 2; + uint64 project_id = 1; + repeated ContextMetadata contexts = 2; } message OpenContext { - uint64 project_id = 1; - string context_id = 2; + uint64 project_id = 1; + string context_id = 2; } message OpenContextResponse { - Context context = 1; + Context context = 1; } message CreateContext { - uint64 project_id = 1; + uint64 project_id = 1; } message CreateContextResponse { - string context_id = 1; - Context context = 2; + string context_id = 1; + Context context = 2; } message UpdateContext { - uint64 project_id = 1; - string context_id = 2; - ContextOperation operation = 3; + uint64 project_id = 1; + string context_id = 2; + ContextOperation operation = 3; } message ContextVersion { - string context_id = 1; - repeated VectorClockEntry context_version = 2; - repeated VectorClockEntry buffer_version = 3; + string context_id = 1; + repeated VectorClockEntry context_version = 2; + repeated VectorClockEntry buffer_version = 
3; } message SynchronizeContexts { - uint64 project_id = 1; - repeated ContextVersion contexts = 2; + uint64 project_id = 1; + repeated ContextVersion contexts = 2; } message SynchronizeContextsResponse { - repeated ContextVersion contexts = 1; + repeated ContextVersion contexts = 1; } enum LanguageModelRole { - LanguageModelUser = 0; - LanguageModelAssistant = 1; - LanguageModelSystem = 2; - reserved 3; + LanguageModelUser = 0; + LanguageModelAssistant = 1; + LanguageModelSystem = 2; + reserved 3; } message GetAgentServerCommand { - uint64 project_id = 1; - string name = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string name = 2; + optional string root_dir = 3; } message GetContextServerCommand { - uint64 project_id = 1; - string server_id = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string server_id = 2; + optional string root_dir = 3; } message ContextServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; + string path = 1; + repeated string args = 2; + map env = 3; } message AgentServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; - string root_dir = 4; + string path = 1; + repeated string args = 2; + map env = 3; + string root_dir = 4; - optional SpawnInTerminal login = 5; + optional SpawnInTerminal login = 5; } message ExternalAgentsUpdated { - uint64 project_id = 1; - repeated string names = 2; + uint64 project_id = 1; + repeated string names = 2; } message ExternalExtensionAgentTarget { - string archive = 1; - string cmd = 2; - repeated string args = 3; - optional string sha256 = 4; - map env = 5; + string archive = 1; + string cmd = 2; + repeated string args = 3; + optional string sha256 = 4; + map env = 5; } message ExternalExtensionAgent { - string name = 1; - optional string icon_path = 2; - string extension_id = 3; - map targets = 4; - map env = 5; + string name = 1; + optional string icon_path = 2; + string extension_id = 3; + map targets = 4; + map env = 5; } 
message ExternalExtensionAgentsUpdated { - uint64 project_id = 1; - repeated ExternalExtensionAgent agents = 2; + uint64 project_id = 1; + repeated ExternalExtensionAgent agents = 2; } message ExternalAgentLoadingStatusUpdated { - uint64 project_id = 1; - string name = 2; - string status = 3; + uint64 project_id = 1; + string name = 2; + string status = 3; } message NewExternalAgentVersionAvailable { - uint64 project_id = 1; - string name = 2; - string version = 3; + uint64 project_id = 1; + string name = 2; + string version = 3; } message ShareAgentThread { - string session_id = 1; // Client-generated UUID (acp::SessionId) - string title = 2; - bytes thread_data = 3; + string session_id = 1; // Client-generated UUID (acp::SessionId) + string title = 2; + bytes thread_data = 3; } message GetSharedAgentThread { - string session_id = 1; // UUID string + string session_id = 1; // UUID string } message GetSharedAgentThreadResponse { - string title = 1; - bytes thread_data = 2; - string sharer_username = 3; - string created_at = 4; + string title = 1; + bytes thread_data = 2; + string sharer_username = 3; + string created_at = 4; } diff --git a/crates/proto/proto/app.proto b/crates/proto/proto/app.proto index 3aa3b23a889228903e14755e90eecfa168702f0c..2ced6a16d4441c11c124b73115a41a9e7008843a 100644 --- a/crates/proto/proto/app.proto +++ b/crates/proto/proto/app.proto @@ -4,60 +4,59 @@ package zed.messages; message ShutdownRemoteServer {} message Toast { - uint64 project_id = 1; - string notification_id = 2; - string message = 3; + uint64 project_id = 1; + string notification_id = 2; + string message = 3; } message HideToast { - uint64 project_id = 1; - string notification_id = 2; + uint64 project_id = 1; + string notification_id = 2; } message OpenServerSettings { - uint64 project_id = 1; + uint64 project_id = 1; } -message GetCrashFiles { -} +message GetCrashFiles {} message GetCrashFilesResponse { - repeated CrashReport crashes = 1; - reserved 2; // old panics + 
repeated CrashReport crashes = 1; + reserved 2; // old panics } message CrashReport { - reserved 1, 2; - string metadata = 3; - bytes minidump_contents = 4; + reserved 1, 2; + string metadata = 3; + bytes minidump_contents = 4; } message Extension { - string id = 1; - string version = 2; - bool dev = 3; + string id = 1; + string version = 2; + bool dev = 3; } message SyncExtensions { - repeated Extension extensions = 1; + repeated Extension extensions = 1; } message SyncExtensionsResponse { - string tmp_dir = 1; - repeated Extension missing_extensions = 2; + string tmp_dir = 1; + repeated Extension missing_extensions = 2; } message InstallExtension { - Extension extension = 1; - string tmp_dir = 2; + Extension extension = 1; + string tmp_dir = 2; } message AskPassRequest { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - string prompt = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + string prompt = 5; } message AskPassResponse { @@ -65,29 +64,29 @@ message AskPassResponse { } message GetRemoteProfilingData { - uint64 project_id = 1; - bool foreground_only = 2; + uint64 project_id = 1; + bool foreground_only = 2; } message GetRemoteProfilingDataResponse { - repeated RemoteProfilingThread threads = 1; - uint64 now_nanos = 2; + repeated RemoteProfilingThread threads = 1; + uint64 now_nanos = 2; } message RemoteProfilingThread { - optional string thread_name = 1; - uint64 thread_id = 2; - repeated RemoteProfilingTiming timings = 3; + optional string thread_name = 1; + uint64 thread_id = 2; + repeated RemoteProfilingTiming timings = 3; } message RemoteProfilingTiming { - RemoteProfilingLocation location = 1; - uint64 start_nanos = 2; - uint64 duration_nanos = 3; + RemoteProfilingLocation location = 1; + uint64 start_nanos = 2; + uint64 duration_nanos = 3; } message RemoteProfilingLocation { - string file = 1; - uint32 line = 2; - uint32 column = 3; + string file = 1; + uint32 
line = 2; + uint32 column = 3; } diff --git a/crates/proto/proto/buf.yaml b/crates/proto/proto/buf.yaml index 93e819b2f771c2f2e3c032e6c50c0d126758ac19..37436d8d80f9435729d54da4326000be05b085f7 100644 --- a/crates/proto/proto/buf.yaml +++ b/crates/proto/proto/buf.yaml @@ -2,3 +2,13 @@ version: v1 breaking: use: - WIRE +lint: + except: + # Since we use post_build instead of buf this doesn't matter + - PACKAGE_DIRECTORY_MATCH + # This is internal to Zed only so we don't enforce versions + - PACKAGE_VERSION_SUFFIX + # Style rules we don't enforce + - ENUM_VALUE_PREFIX + - ENUM_VALUE_UPPER_SNAKE_CASE + - ENUM_ZERO_VALUE_SUFFIX diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index 4cd83af2aab8a44feb9f9646ec85d343b8875f82..01f4bda9e9f450ed65d4f6cb8dc9abc7c35451dd 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -5,313 +5,312 @@ import "core.proto"; import "worktree.proto"; message OpenNewBuffer { - uint64 project_id = 1; + uint64 project_id = 1; } message OpenBufferResponse { - uint64 buffer_id = 1; + uint64 buffer_id = 1; } message CreateBufferForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - BufferState state = 3; - BufferChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + BufferState state = 3; + BufferChunk chunk = 4; + } } message UpdateBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated Operation operations = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated Operation operations = 3; } message OpenBufferByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenBufferById { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } message UpdateBufferFile { - uint64 project_id = 1; - uint64 buffer_id = 2; - File file = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + File 
file = 3; } message SaveBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - optional ProjectPath new_path = 4; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + optional ProjectPath new_path = 4; } message CloseBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message BufferSaved { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; } message BufferReloaded { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; - LineEnding line_ending = 6; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; + LineEnding line_ending = 6; } message ReloadBuffers { - uint64 project_id = 1; - repeated uint64 buffer_ids = 2; + uint64 project_id = 1; + repeated uint64 buffer_ids = 2; } message ReloadBuffersResponse { - ProjectTransaction transaction = 1; + ProjectTransaction transaction = 1; } message SynchronizeBuffers { - uint64 project_id = 1; - repeated BufferVersion buffers = 2; + uint64 project_id = 1; + repeated BufferVersion buffers = 2; } message SynchronizeBuffersResponse { - repeated BufferVersion buffers = 1; + repeated BufferVersion buffers = 1; } message BufferVersion { - uint64 id = 1; - repeated VectorClockEntry version = 2; + uint64 id = 1; + repeated VectorClockEntry version = 2; } message BufferState { - uint64 id = 1; - optional File file = 2; - string base_text = 3; - LineEnding line_ending = 5; - repeated VectorClockEntry saved_version = 6; - Timestamp saved_mtime = 8; + uint64 id = 1; + optional File file = 2; + string base_text = 3; + LineEnding line_ending = 5; + 
repeated VectorClockEntry saved_version = 6; + Timestamp saved_mtime = 8; - reserved 7; - reserved 4; + reserved 7; + reserved 4; } message BufferChunk { - uint64 buffer_id = 1; - repeated Operation operations = 2; - bool is_last = 3; + uint64 buffer_id = 1; + repeated Operation operations = 2; + bool is_last = 3; } enum LineEnding { - Unix = 0; - Windows = 1; + Unix = 0; + Windows = 1; } message VectorClockEntry { - uint32 replica_id = 1; - uint32 timestamp = 2; + uint32 replica_id = 1; + uint32 timestamp = 2; } message UndoMapEntry { - uint32 replica_id = 1; - uint32 local_timestamp = 2; - repeated UndoCount counts = 3; + uint32 replica_id = 1; + uint32 local_timestamp = 2; + repeated UndoCount counts = 3; } message UndoCount { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint32 count = 3; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint32 count = 3; } message Operation { - oneof variant { - Edit edit = 1; - Undo undo = 2; - UpdateSelections update_selections = 3; - UpdateDiagnostics update_diagnostics = 4; - UpdateCompletionTriggers update_completion_triggers = 5; - UpdateLineEnding update_line_ending = 6; - } - - message Edit { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated Range ranges = 4; - repeated string new_text = 5; - } - - message Undo { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated UndoCount counts = 4; - } - - message UpdateSelections { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated Selection selections = 3; - bool line_mode = 4; - CursorShape cursor_shape = 5; - } - - message UpdateCompletionTriggers { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated string triggers = 3; - uint64 language_server_id = 4; - } - - message UpdateLineEnding { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - LineEnding line_ending = 3; - } + oneof variant { + Edit edit = 
1; + Undo undo = 2; + UpdateSelections update_selections = 3; + UpdateDiagnostics update_diagnostics = 4; + UpdateCompletionTriggers update_completion_triggers = 5; + UpdateLineEnding update_line_ending = 6; + } + + message Edit { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated Range ranges = 4; + repeated string new_text = 5; + } + + message Undo { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated UndoCount counts = 4; + } + + message UpdateSelections { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Selection selections = 3; + bool line_mode = 4; + CursorShape cursor_shape = 5; + } + + message UpdateCompletionTriggers { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated string triggers = 3; + uint64 language_server_id = 4; + } + + message UpdateLineEnding { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + LineEnding line_ending = 3; + } } message ProjectTransaction { - repeated uint64 buffer_ids = 1; - repeated Transaction transactions = 2; + repeated uint64 buffer_ids = 1; + repeated Transaction transactions = 2; } message Transaction { - LamportTimestamp id = 1; - repeated LamportTimestamp edit_ids = 2; - repeated VectorClockEntry start = 3; + LamportTimestamp id = 1; + repeated LamportTimestamp edit_ids = 2; + repeated VectorClockEntry start = 3; } message LamportTimestamp { - uint32 replica_id = 1; - uint32 value = 2; + uint32 replica_id = 1; + uint32 value = 2; } message Range { - uint64 start = 1; - uint64 end = 2; + uint64 start = 1; + uint64 end = 2; } message Selection { - uint64 id = 1; - EditorAnchor start = 2; - EditorAnchor end = 3; - bool reversed = 4; + uint64 id = 1; + EditorAnchor start = 2; + EditorAnchor end = 3; + bool reversed = 4; } message EditorAnchor { - uint64 excerpt_id = 1; - Anchor anchor = 2; + uint64 excerpt_id = 1; + Anchor anchor = 2; } enum CursorShape { - CursorBar 
= 0; - CursorBlock = 1; - CursorUnderscore = 2; - CursorHollow = 3; + CursorBar = 0; + CursorBlock = 1; + CursorUnderscore = 2; + CursorHollow = 3; } message UpdateDiagnostics { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint64 server_id = 3; - repeated Diagnostic diagnostics = 4; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint64 server_id = 3; + repeated Diagnostic diagnostics = 4; } message Anchor { - uint32 replica_id = 1; - uint32 timestamp = 2; - uint64 offset = 3; - Bias bias = 4; - optional uint64 buffer_id = 5; + uint32 replica_id = 1; + uint32 timestamp = 2; + uint64 offset = 3; + Bias bias = 4; + optional uint64 buffer_id = 5; } message AnchorRange { - Anchor start = 1; - Anchor end = 2; + Anchor start = 1; + Anchor end = 2; } message Location { - uint64 buffer_id = 1; - Anchor start = 2; - Anchor end = 3; + uint64 buffer_id = 1; + Anchor start = 2; + Anchor end = 3; } enum Bias { - Left = 0; - Right = 1; + Left = 0; + Right = 1; } message Diagnostic { - Anchor start = 1; - Anchor end = 2; - optional string source = 3; - optional string registration_id = 17; - - enum SourceKind { - Pulled = 0; - Pushed = 1; - Other = 2; - } - - SourceKind source_kind = 16; - Severity severity = 4; - string message = 5; - optional string code = 6; - uint64 group_id = 7; - bool is_primary = 8; - - reserved 9; - - bool is_disk_based = 10; - bool is_unnecessary = 11; - bool underline = 15; - - enum Severity { - None = 0; - Error = 1; - Warning = 2; - Information = 3; - Hint = 4; - } - optional string data = 12; - optional string code_description = 13; - optional string markdown = 14; + Anchor start = 1; + Anchor end = 2; + optional string source = 3; + optional string registration_id = 17; + + enum SourceKind { + Pulled = 0; + Pushed = 1; + Other = 2; + } + + SourceKind source_kind = 16; + Severity severity = 4; + string message = 5; + optional string code = 6; + uint64 group_id = 7; + bool is_primary = 8; + + reserved 9; + + bool is_disk_based 
= 10; + bool is_unnecessary = 11; + bool underline = 15; + + enum Severity { + None = 0; + Error = 1; + Warning = 2; + Information = 3; + Hint = 4; + } + optional string data = 12; + optional string code_description = 13; + optional string markdown = 14; } message SearchQuery { - string query = 2; - bool regex = 3; - bool whole_word = 4; - bool case_sensitive = 5; - repeated string files_to_include = 10; - repeated string files_to_exclude = 11; - bool match_full_paths = 9; - bool include_ignored = 8; - string files_to_include_legacy = 6; - string files_to_exclude_legacy = 7; + string query = 2; + bool regex = 3; + bool whole_word = 4; + bool case_sensitive = 5; + repeated string files_to_include = 10; + repeated string files_to_exclude = 11; + bool match_full_paths = 9; + bool include_ignored = 8; + string files_to_include_legacy = 6; + string files_to_exclude_legacy = 7; } message FindSearchCandidates { - uint64 project_id = 1; - SearchQuery query = 2; - uint64 limit = 3; - uint64 handle = 4; + uint64 project_id = 1; + SearchQuery query = 2; + uint64 limit = 3; + uint64 handle = 4; } - message FindSearchCandidatesDone {} message FindSearchCandidatesMatches { @@ -330,6 +329,6 @@ message FindSearchCandidatesChunk { } message FindSearchCandidatesCancelled { - uint64 project_id = 1; - uint64 handle = 2; + uint64 project_id = 1; + uint64 handle = 2; } diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index a7fe607bb5aaaff53518652186f46bcb6529e661..31448a8819d13f50088aa7eafcd6af8b6d52bc17 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -1,424 +1,424 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; -import "worktree.proto"; import "buffer.proto"; -import "lsp.proto"; import "channel.proto"; +import "core.proto"; import "git.proto"; +import "lsp.proto"; +import "worktree.proto"; message CreateRoom {} message CreateRoomResponse { - Room room = 1; - optional LiveKitConnectionInfo 
live_kit_connection_info = 2; + Room room = 1; + optional LiveKitConnectionInfo live_kit_connection_info = 2; } message JoinRoom { - uint64 id = 1; + uint64 id = 1; } message JoinRoomResponse { - Room room = 1; - optional uint64 channel_id = 2; - optional LiveKitConnectionInfo live_kit_connection_info = 3; + Room room = 1; + optional uint64 channel_id = 2; + optional LiveKitConnectionInfo live_kit_connection_info = 3; } message RejoinRoom { - uint64 id = 1; - repeated UpdateProject reshared_projects = 2; - repeated RejoinProject rejoined_projects = 3; + uint64 id = 1; + repeated UpdateProject reshared_projects = 2; + repeated RejoinProject rejoined_projects = 3; } message RejoinRemoteProjects { - repeated RejoinProject rejoined_projects = 1; + repeated RejoinProject rejoined_projects = 1; } message RejoinRemoteProjectsResponse { - repeated RejoinedProject rejoined_projects = 1; + repeated RejoinedProject rejoined_projects = 1; } message RejoinProject { - uint64 id = 1; - repeated RejoinWorktree worktrees = 2; - repeated RejoinRepository repositories = 3; + uint64 id = 1; + repeated RejoinWorktree worktrees = 2; + repeated RejoinRepository repositories = 3; } message RejoinWorktree { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRepository { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRoomResponse { - Room room = 1; - repeated ResharedProject reshared_projects = 2; - repeated RejoinedProject rejoined_projects = 3; + Room room = 1; + repeated ResharedProject reshared_projects = 2; + repeated RejoinedProject rejoined_projects = 3; } message ResharedProject { - uint64 id = 1; - repeated Collaborator collaborators = 2; + uint64 id = 1; + repeated Collaborator collaborators = 2; } message RejoinedProject { - uint64 id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string 
language_server_capabilities = 5; + uint64 id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 5; } message LeaveRoom {} message Room { - uint64 id = 1; - repeated Participant participants = 2; - repeated PendingParticipant pending_participants = 3; - repeated Follower followers = 4; - string livekit_room = 5; + uint64 id = 1; + repeated Participant participants = 2; + repeated PendingParticipant pending_participants = 3; + repeated Follower followers = 4; + string livekit_room = 5; } message Participant { - uint64 user_id = 1; - PeerId peer_id = 2; - repeated ParticipantProject projects = 3; - ParticipantLocation location = 4; - uint32 participant_index = 5; - ChannelRole role = 6; - reserved 7; + uint64 user_id = 1; + PeerId peer_id = 2; + repeated ParticipantProject projects = 3; + ParticipantLocation location = 4; + uint32 participant_index = 5; + ChannelRole role = 6; + reserved 7; } message PendingParticipant { - uint64 user_id = 1; - uint64 calling_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 user_id = 1; + uint64 calling_user_id = 2; + optional uint64 initial_project_id = 3; } message ParticipantProject { - uint64 id = 1; - repeated string worktree_root_names = 2; + uint64 id = 1; + repeated string worktree_root_names = 2; } message Follower { - PeerId leader_id = 1; - PeerId follower_id = 2; - uint64 project_id = 3; + PeerId leader_id = 1; + PeerId follower_id = 2; + uint64 project_id = 3; } message ParticipantLocation { - oneof variant { - SharedProject shared_project = 1; - UnsharedProject unshared_project = 2; - External external = 3; - } + oneof variant { + SharedProject shared_project = 1; + UnsharedProject unshared_project = 2; + External external = 3; + } - message SharedProject { - uint64 id = 1; - } + message SharedProject { + uint64 id = 1; + } - message UnsharedProject {} + message 
UnsharedProject {} - message External {} + message External {} } message Call { - uint64 room_id = 1; - uint64 called_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 room_id = 1; + uint64 called_user_id = 2; + optional uint64 initial_project_id = 3; } message IncomingCall { - uint64 room_id = 1; - uint64 calling_user_id = 2; - repeated uint64 participant_user_ids = 3; - optional ParticipantProject initial_project = 4; + uint64 room_id = 1; + uint64 calling_user_id = 2; + repeated uint64 participant_user_ids = 3; + optional ParticipantProject initial_project = 4; } message CallCanceled { - uint64 room_id = 1; + uint64 room_id = 1; } message CancelCall { - uint64 room_id = 1; - uint64 called_user_id = 2; + uint64 room_id = 1; + uint64 called_user_id = 2; } message DeclineCall { - uint64 room_id = 1; + uint64 room_id = 1; } message UpdateParticipantLocation { - uint64 room_id = 1; - ParticipantLocation location = 2; + uint64 room_id = 1; + ParticipantLocation location = 2; } message RoomUpdated { - Room room = 1; + Room room = 1; } message LiveKitConnectionInfo { - string server_url = 1; - string token = 2; - bool can_publish = 3; + string server_url = 1; + string token = 2; + bool can_publish = 3; } message ShareProject { - uint64 room_id = 1; - repeated WorktreeMetadata worktrees = 2; - reserved 3; - bool is_ssh_project = 4; - optional bool windows_paths = 5; + uint64 room_id = 1; + repeated WorktreeMetadata worktrees = 2; + reserved 3; + bool is_ssh_project = 4; + optional bool windows_paths = 5; } message ShareProjectResponse { - uint64 project_id = 1; + uint64 project_id = 1; } message UnshareProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateProject { - uint64 project_id = 1; - repeated WorktreeMetadata worktrees = 2; + uint64 project_id = 1; + repeated WorktreeMetadata worktrees = 2; } message JoinProject { - uint64 project_id = 1; - optional string committer_email = 2; - optional string committer_name = 3; + uint64 
project_id = 1; + optional string committer_email = 2; + optional string committer_name = 3; } message JoinProjectResponse { - uint64 project_id = 5; - uint32 replica_id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string language_server_capabilities = 8; - ChannelRole role = 6; - bool windows_paths = 9; - reserved 7; + uint64 project_id = 5; + uint32 replica_id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 8; + ChannelRole role = 6; + bool windows_paths = 9; + reserved 7; } message LeaveProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateWorktree { - uint64 project_id = 1; - uint64 worktree_id = 2; - string root_name = 3; - repeated Entry updated_entries = 4; - repeated uint64 removed_entries = 5; - repeated RepositoryEntry updated_repositories = 6; // deprecated - repeated uint64 removed_repositories = 7; // deprecated - uint64 scan_id = 8; - bool is_last_update = 9; - string abs_path = 10; + uint64 project_id = 1; + uint64 worktree_id = 2; + string root_name = 3; + repeated Entry updated_entries = 4; + repeated uint64 removed_entries = 5; + repeated RepositoryEntry updated_repositories = 6; // deprecated + repeated uint64 removed_repositories = 7; // deprecated + uint64 scan_id = 8; + bool is_last_update = 9; + string abs_path = 10; } // deprecated message RepositoryEntry { - uint64 repository_id = 1; - reserved 2; - repeated StatusEntry updated_statuses = 3; - repeated string removed_statuses = 4; - repeated string current_merge_conflicts = 5; - optional Branch branch_summary = 6; + uint64 repository_id = 1; + reserved 2; + repeated StatusEntry updated_statuses = 3; + repeated string removed_statuses = 4; + repeated string current_merge_conflicts = 5; + optional Branch branch_summary = 6; } 
message AddProjectCollaborator { - uint64 project_id = 1; - Collaborator collaborator = 2; + uint64 project_id = 1; + Collaborator collaborator = 2; } message UpdateProjectCollaborator { - uint64 project_id = 1; - PeerId old_peer_id = 2; - PeerId new_peer_id = 3; + uint64 project_id = 1; + PeerId old_peer_id = 2; + PeerId new_peer_id = 3; } message RemoveProjectCollaborator { - uint64 project_id = 1; - PeerId peer_id = 2; + uint64 project_id = 1; + PeerId peer_id = 2; } message GetUsers { - repeated uint64 user_ids = 1; + repeated uint64 user_ids = 1; } message FuzzySearchUsers { - string query = 1; + string query = 1; } message UsersResponse { - repeated User users = 1; + repeated User users = 1; } message RequestContact { - uint64 responder_id = 1; + uint64 responder_id = 1; } message RemoveContact { - uint64 user_id = 1; + uint64 user_id = 1; } message RespondToContactRequest { - uint64 requester_id = 1; - ContactRequestResponse response = 2; + uint64 requester_id = 1; + ContactRequestResponse response = 2; } enum ContactRequestResponse { - Accept = 0; - Decline = 1; - Block = 2; - Dismiss = 3; + Accept = 0; + Decline = 1; + Block = 2; + Dismiss = 3; } message UpdateContacts { - repeated Contact contacts = 1; - repeated uint64 remove_contacts = 2; - repeated IncomingContactRequest incoming_requests = 3; - repeated uint64 remove_incoming_requests = 4; - repeated uint64 outgoing_requests = 5; - repeated uint64 remove_outgoing_requests = 6; + repeated Contact contacts = 1; + repeated uint64 remove_contacts = 2; + repeated IncomingContactRequest incoming_requests = 3; + repeated uint64 remove_incoming_requests = 4; + repeated uint64 outgoing_requests = 5; + repeated uint64 remove_outgoing_requests = 6; } message ShowContacts {} message IncomingContactRequest { - uint64 requester_id = 1; + uint64 requester_id = 1; } message Follow { - uint64 room_id = 1; - optional uint64 project_id = 2; - PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; 
+ PeerId leader_id = 3; } message FollowResponse { - View active_view = 3; - reserved 1; - repeated View views = 2; + View active_view = 3; + reserved 1; + repeated View views = 2; } message UpdateFollowers { - uint64 room_id = 1; - optional uint64 project_id = 2; - reserved 3; - oneof variant { - View create_view = 5; - UpdateActiveView update_active_view = 4; - UpdateView update_view = 6; - } + uint64 room_id = 1; + optional uint64 project_id = 2; + reserved 3; + oneof variant { + View create_view = 5; + UpdateActiveView update_active_view = 4; + UpdateView update_view = 6; + } } message Unfollow { - uint64 room_id = 1; - optional uint64 project_id = 2; - PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; + PeerId leader_id = 3; } message ViewId { - PeerId creator = 1; - uint64 id = 2; + PeerId creator = 1; + uint64 id = 2; } message UpdateActiveView { - reserved 1, 2; - View view = 3; + reserved 1, 2; + View view = 3; } enum PanelId { - AssistantPanel = 0; - DebugPanel = 1; + AssistantPanel = 0; + DebugPanel = 1; } message UpdateView { - ViewId id = 1; - optional PeerId leader_id = 2; - - oneof variant { - Editor editor = 3; - } - - message Editor { - repeated ExcerptInsertion inserted_excerpts = 1; - repeated uint64 deleted_excerpts = 2; - repeated Selection selections = 3; - optional Selection pending_selection = 4; - EditorAnchor scroll_top_anchor = 5; - reserved 6; - reserved 7; - double scroll_x = 8; - double scroll_y = 9; - } + ViewId id = 1; + optional PeerId leader_id = 2; + + oneof variant { + Editor editor = 3; + } + + message Editor { + repeated ExcerptInsertion inserted_excerpts = 1; + repeated uint64 deleted_excerpts = 2; + repeated Selection selections = 3; + optional Selection pending_selection = 4; + EditorAnchor scroll_top_anchor = 5; + reserved 6; + reserved 7; + double scroll_x = 8; + double scroll_y = 9; + } } message View { - ViewId id = 1; - optional PeerId leader_id = 2; - optional PanelId panel_id = 6; - - oneof 
variant { - Editor editor = 3; - ChannelView channel_view = 4; - ContextEditor context_editor = 5; - } - - message Editor { - bool singleton = 1; - optional string title = 2; - repeated Excerpt excerpts = 3; - repeated Selection selections = 4; - optional Selection pending_selection = 5; - EditorAnchor scroll_top_anchor = 6; - reserved 7; - reserved 8; - double scroll_x = 9; - double scroll_y = 10; - } - - message ChannelView { - uint64 channel_id = 1; - Editor editor = 2; - } - - message ContextEditor { - string context_id = 1; - Editor editor = 2; - } + ViewId id = 1; + optional PeerId leader_id = 2; + optional PanelId panel_id = 6; + + oneof variant { + Editor editor = 3; + ChannelView channel_view = 4; + ContextEditor context_editor = 5; + } + + message Editor { + bool singleton = 1; + optional string title = 2; + repeated Excerpt excerpts = 3; + repeated Selection selections = 4; + optional Selection pending_selection = 5; + EditorAnchor scroll_top_anchor = 6; + reserved 7; + reserved 8; + double scroll_x = 9; + double scroll_y = 10; + } + + message ChannelView { + uint64 channel_id = 1; + Editor editor = 2; + } + + message ContextEditor { + string context_id = 1; + Editor editor = 2; + } } message ExcerptInsertion { - Excerpt excerpt = 1; - optional uint64 previous_excerpt_id = 2; + Excerpt excerpt = 1; + optional uint64 previous_excerpt_id = 2; } message Excerpt { - uint64 id = 1; - uint64 buffer_id = 2; - Anchor context_start = 3; - Anchor context_end = 4; - Anchor primary_start = 5; - Anchor primary_end = 6; + uint64 id = 1; + uint64 buffer_id = 2; + Anchor context_start = 3; + Anchor context_end = 4; + Anchor primary_start = 5; + Anchor primary_end = 6; } message Contact { - uint64 user_id = 1; - bool online = 2; - bool busy = 3; + uint64 user_id = 1; + bool online = 2; + bool busy = 3; } message SetRoomParticipantRole { - uint64 room_id = 1; - uint64 user_id = 2; - ChannelRole role = 3; + uint64 room_id = 1; + uint64 user_id = 2; + ChannelRole role = 3; 
} diff --git a/crates/proto/proto/channel.proto b/crates/proto/proto/channel.proto index cada21cd5b7ede4730f2f4e71e98fb9a3dc12ff0..f1238b20a37815c9c6db999b8031a8eff2ba6cea 100644 --- a/crates/proto/proto/channel.proto +++ b/crates/proto/proto/channel.proto @@ -1,294 +1,294 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; import "buffer.proto"; +import "core.proto"; message Channel { - uint64 id = 1; - string name = 2; - ChannelVisibility visibility = 3; - int32 channel_order = 4; - repeated uint64 parent_path = 5; + uint64 id = 1; + string name = 2; + ChannelVisibility visibility = 3; + int32 channel_order = 4; + repeated uint64 parent_path = 5; } enum ChannelVisibility { - Public = 0; - Members = 1; + Public = 0; + Members = 1; } message UpdateChannels { - repeated Channel channels = 1; - repeated uint64 delete_channels = 4; - repeated Channel channel_invitations = 5; - repeated uint64 remove_channel_invitations = 6; - repeated ChannelParticipants channel_participants = 7; - repeated ChannelBufferVersion latest_channel_buffer_versions = 9; + repeated Channel channels = 1; + repeated uint64 delete_channels = 4; + repeated Channel channel_invitations = 5; + repeated uint64 remove_channel_invitations = 6; + repeated ChannelParticipants channel_participants = 7; + repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - reserved 8; - reserved 10 to 15; + reserved 8; + reserved 10 to 15; } message UpdateUserChannels { - repeated ChannelBufferVersion observed_channel_buffer_version = 2; - repeated ChannelMembership channel_memberships = 3; + repeated ChannelBufferVersion observed_channel_buffer_version = 2; + repeated ChannelMembership channel_memberships = 3; - reserved 1; + reserved 1; } message ChannelMembership { - uint64 channel_id = 1; - ChannelRole role = 2; + uint64 channel_id = 1; + ChannelRole role = 2; } message ChannelMessageId { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } 
message ChannelPermission { - uint64 channel_id = 1; - ChannelRole role = 3; + uint64 channel_id = 1; + ChannelRole role = 3; } message ChannelParticipants { - uint64 channel_id = 1; - repeated uint64 participant_user_ids = 2; + uint64 channel_id = 1; + repeated uint64 participant_user_ids = 2; } message JoinChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message DeleteChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message GetChannelMembers { - uint64 channel_id = 1; - string query = 2; - uint64 limit = 3; + uint64 channel_id = 1; + string query = 2; + uint64 limit = 3; } message GetChannelMembersResponse { - repeated ChannelMember members = 1; - repeated User users = 2; + repeated ChannelMember members = 1; + repeated User users = 2; } message ChannelMember { - uint64 user_id = 1; - Kind kind = 3; - ChannelRole role = 4; + uint64 user_id = 1; + Kind kind = 3; + ChannelRole role = 4; - enum Kind { - Member = 0; - Invitee = 1; - } + enum Kind { + Member = 0; + Invitee = 1; + } } message SubscribeToChannels {} message CreateChannel { - string name = 1; - optional uint64 parent_id = 2; + string name = 1; + optional uint64 parent_id = 2; } message CreateChannelResponse { - Channel channel = 1; - optional uint64 parent_id = 2; + Channel channel = 1; + optional uint64 parent_id = 2; } message InviteChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 4; + uint64 channel_id = 1; + uint64 user_id = 2; + ChannelRole role = 4; } message RemoveChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; + uint64 channel_id = 1; + uint64 user_id = 2; } enum ChannelRole { - Admin = 0; - Member = 1; - Guest = 2; - Banned = 3; - Talker = 4; + Admin = 0; + Member = 1; + Guest = 2; + Banned = 3; + Talker = 4; } message SetChannelMemberRole { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 3; + uint64 channel_id = 1; + uint64 user_id = 2; + ChannelRole role = 3; } message SetChannelVisibility { - 
uint64 channel_id = 1; - ChannelVisibility visibility = 2; + uint64 channel_id = 1; + ChannelVisibility visibility = 2; } message RenameChannel { - uint64 channel_id = 1; - string name = 2; + uint64 channel_id = 1; + string name = 2; } message RenameChannelResponse { - Channel channel = 1; + Channel channel = 1; } message JoinChannelChat { - uint64 channel_id = 1; + uint64 channel_id = 1; } message JoinChannelChatResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message LeaveChannelChat { - uint64 channel_id = 1; + uint64 channel_id = 1; } message SendChannelMessage { - uint64 channel_id = 1; - string body = 2; - Nonce nonce = 3; - repeated ChatMention mentions = 4; - optional uint64 reply_to_message_id = 5; + uint64 channel_id = 1; + string body = 2; + Nonce nonce = 3; + repeated ChatMention mentions = 4; + optional uint64 reply_to_message_id = 5; } message RemoveChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message UpdateChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; - Nonce nonce = 4; - string body = 5; - repeated ChatMention mentions = 6; + uint64 channel_id = 1; + uint64 message_id = 2; + Nonce nonce = 4; + string body = 5; + repeated ChatMention mentions = 6; } message AckChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message SendChannelMessageResponse { - ChannelMessage message = 1; + ChannelMessage message = 1; } message ChannelMessageSent { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message ChannelMessageUpdate { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message GetChannelMessages { - uint64 channel_id = 1; - uint64 before_message_id = 2; + uint64 channel_id = 1; + uint64 
before_message_id = 2; } message GetChannelMessagesResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message GetChannelMessagesById { - repeated uint64 message_ids = 1; + repeated uint64 message_ids = 1; } message MoveChannel { - uint64 channel_id = 1; - uint64 to = 2; + uint64 channel_id = 1; + uint64 to = 2; } message ReorderChannel { - uint64 channel_id = 1; - enum Direction { - Up = 0; - Down = 1; - } - Direction direction = 2; + uint64 channel_id = 1; + enum Direction { + Up = 0; + Down = 1; + } + Direction direction = 2; } message JoinChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message ChannelBufferVersion { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - uint64 epoch = 3; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + uint64 epoch = 3; } message UpdateChannelBufferCollaborators { - uint64 channel_id = 1; - repeated Collaborator collaborators = 2; + uint64 channel_id = 1; + repeated Collaborator collaborators = 2; } message UpdateChannelBuffer { - uint64 channel_id = 1; - repeated Operation operations = 2; + uint64 channel_id = 1; + repeated Operation operations = 2; } message ChannelMessage { - uint64 id = 1; - string body = 2; - uint64 timestamp = 3; - uint64 sender_id = 4; - Nonce nonce = 5; - repeated ChatMention mentions = 6; - optional uint64 reply_to_message_id = 7; - optional uint64 edited_at = 8; + uint64 id = 1; + string body = 2; + uint64 timestamp = 3; + uint64 sender_id = 4; + Nonce nonce = 5; + repeated ChatMention mentions = 6; + optional uint64 reply_to_message_id = 7; + optional uint64 edited_at = 8; } message ChatMention { - Range range = 1; - uint64 user_id = 2; + Range range = 1; + uint64 user_id = 2; } message RejoinChannelBuffers { - repeated ChannelBufferVersion buffers = 1; + repeated ChannelBufferVersion buffers = 1; } message RejoinChannelBuffersResponse { - repeated RejoinedChannelBuffer 
buffers = 1; + repeated RejoinedChannelBuffer buffers = 1; } message AckBufferOperation { - uint64 buffer_id = 1; - uint64 epoch = 2; - repeated VectorClockEntry version = 3; + uint64 buffer_id = 1; + uint64 epoch = 2; + repeated VectorClockEntry version = 3; } message JoinChannelBufferResponse { - uint64 buffer_id = 1; - uint32 replica_id = 2; - string base_text = 3; - repeated Operation operations = 4; - repeated Collaborator collaborators = 5; - uint64 epoch = 6; + uint64 buffer_id = 1; + uint32 replica_id = 2; + string base_text = 3; + repeated Operation operations = 4; + repeated Collaborator collaborators = 5; + uint64 epoch = 6; } message RejoinedChannelBuffer { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - repeated Operation operations = 3; - repeated Collaborator collaborators = 4; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + repeated Operation operations = 3; + repeated Collaborator collaborators = 4; } message LeaveChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message RespondToChannelInvite { - uint64 channel_id = 1; - bool accept = 2; + uint64 channel_id = 1; + bool accept = 2; } diff --git a/crates/proto/proto/core.proto b/crates/proto/proto/core.proto index 121ea749127d7af4bbc34da2a1edbad78b7763df..c721ab62a11620895f8d54e69b4eb0bf168e43d0 100644 --- a/crates/proto/proto/core.proto +++ b/crates/proto/proto/core.proto @@ -2,28 +2,28 @@ syntax = "proto3"; package zed.messages; message PeerId { - uint32 owner_id = 1; - uint32 id = 2; + uint32 owner_id = 1; + uint32 id = 2; } message User { - reserved 4; - uint64 id = 1; - string github_login = 2; - string avatar_url = 3; - optional string name = 5; + reserved 4; + uint64 id = 1; + string github_login = 2; + string avatar_url = 3; + optional string name = 5; } message Nonce { - uint64 upper_half = 1; - uint64 lower_half = 2; + uint64 upper_half = 1; + uint64 lower_half = 2; } message Collaborator { - PeerId peer_id = 1; - uint32 replica_id 
= 2; - uint64 user_id = 3; - bool is_host = 4; - optional string committer_name = 5; - optional string committer_email = 6; + PeerId peer_id = 1; + uint32 replica_id = 2; + uint64 user_id = 3; + bool is_host = 4; + optional string committer_name = 5; + optional string committer_email = 6; } diff --git a/crates/proto/proto/debugger.proto b/crates/proto/proto/debugger.proto index dcfb91c77dd0004bfb248d4e4c23dcf269b7bc11..bf29411f96a45a26265650727d1529e9351245d2 100644 --- a/crates/proto/proto/debugger.proto +++ b/crates/proto/proto/debugger.proto @@ -1,555 +1,553 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; import "buffer.proto"; import "task.proto"; enum BreakpointState { - Enabled = 0; - Disabled = 1; + Enabled = 0; + Disabled = 1; } message Breakpoint { - Anchor position = 1; - BreakpointState state = 2; - reserved 3; - optional string message = 4; - optional string condition = 5; - optional string hit_condition = 6; - map session_state = 7; + Anchor position = 1; + BreakpointState state = 2; + reserved 3; + optional string message = 4; + optional string condition = 5; + optional string hit_condition = 6; + map session_state = 7; } message BreakpointSessionState { - uint64 id = 1; - bool verified = 2; + uint64 id = 1; + bool verified = 2; } message BreakpointsForFile { - uint64 project_id = 1; - string path = 2; - repeated Breakpoint breakpoints = 3; + uint64 project_id = 1; + string path = 2; + repeated Breakpoint breakpoints = 3; } message ToggleBreakpoint { - uint64 project_id = 1; - string path = 2; - Breakpoint breakpoint = 3; + uint64 project_id = 1; + string path = 2; + Breakpoint breakpoint = 3; } enum DapThreadStatus { - Running = 0; - Stopped = 1; - Exited = 2; - Ended = 3; + Running = 0; + Stopped = 1; + Exited = 2; + Ended = 3; } enum VariablesArgumentsFilter { - Indexed = 0; - Named = 1; + Indexed = 0; + Named = 1; } message ValueFormat { - optional bool hex = 1; + optional bool hex = 1; } message VariablesRequest { - uint64 
project_id = 1; - uint64 client_id = 2; - uint64 variables_reference = 3; - optional VariablesArgumentsFilter filter = 4; - optional uint64 start = 5; - optional uint64 count = 6; - optional ValueFormat format = 7; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 variables_reference = 3; + optional VariablesArgumentsFilter filter = 4; + optional uint64 start = 5; + optional uint64 count = 6; + optional ValueFormat format = 7; } enum SteppingGranularity { - Statement = 0; - Line = 1; - Instruction = 2; + Statement = 0; + Line = 1; + Instruction = 2; } message DapLocationsRequest { - uint64 project_id = 1; - uint64 session_id = 2; - uint64 location_reference = 3; + uint64 project_id = 1; + uint64 session_id = 2; + uint64 location_reference = 3; } message DapLocationsResponse { - DapSource source = 1; - uint64 line = 2; - optional uint64 column = 3; - optional uint64 end_line = 4; - optional uint64 end_column = 5; + DapSource source = 1; + uint64 line = 2; + optional uint64 column = 3; + optional uint64 end_line = 4; + optional uint64 end_column = 5; } enum DapEvaluateContext { - Repl = 0; - Watch = 1; - Hover = 2; - Clipboard = 3; - EvaluateVariables = 4; - EvaluateUnknown = 5; + Repl = 0; + Watch = 1; + Hover = 2; + Clipboard = 3; + EvaluateVariables = 4; + EvaluateUnknown = 5; } message DapEvaluateRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string expression = 3; - optional uint64 frame_id = 4; - optional DapEvaluateContext context = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string expression = 3; + optional uint64 frame_id = 4; + optional DapEvaluateContext context = 5; } message DapEvaluateResponse { - string result = 1; - optional string evaluate_type = 2; - uint64 variable_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - optional string memory_reference = 6; + string result = 1; + optional string evaluate_type = 2; + uint64 variable_reference = 3; + optional uint64 
named_variables = 4; + optional uint64 indexed_variables = 5; + optional string memory_reference = 6; } - message DapCompletionRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string query = 3; - optional uint64 frame_id = 4; - optional uint64 line = 5; - uint64 column = 6; + uint64 project_id = 1; + uint64 client_id = 2; + string query = 3; + optional uint64 frame_id = 4; + optional uint64 line = 5; + uint64 column = 6; } enum DapCompletionItemType { - Method = 0; - Function = 1; - Constructor = 2; - Field = 3; - Variable = 4; - Class = 5; - Interface = 6; - Module = 7; - Property = 8; - Unit = 9; - Value = 10; - Enum = 11; - Keyword = 12; - Snippet = 13; - Text = 14; - Color = 15; - CompletionItemFile = 16; - Reference = 17; - Customcolor = 19; + Method = 0; + Function = 1; + Constructor = 2; + Field = 3; + Variable = 4; + Class = 5; + Interface = 6; + Module = 7; + Property = 8; + Unit = 9; + Value = 10; + Enum = 11; + Keyword = 12; + Snippet = 13; + Text = 14; + Color = 15; + CompletionItemFile = 16; + Reference = 17; + Customcolor = 19; } message DapCompletionItem { - string label = 1; - optional string text = 2; - optional string sort_text = 3; - optional string detail = 4; - optional DapCompletionItemType typ = 5; - optional uint64 start = 6; - optional uint64 length = 7; - optional uint64 selection_start = 8; - optional uint64 selection_length = 9; + string label = 1; + optional string text = 2; + optional string sort_text = 3; + optional string detail = 4; + optional DapCompletionItemType typ = 5; + optional uint64 start = 6; + optional uint64 length = 7; + optional uint64 selection_start = 8; + optional uint64 selection_length = 9; } message DapCompletionResponse { - uint64 client_id = 1; - repeated DapCompletionItem completions = 2; + uint64 client_id = 1; + repeated DapCompletionItem completions = 2; } message DapScopesRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 stack_frame_id = 3; + uint64 project_id = 1; + uint64 
client_id = 2; + uint64 stack_frame_id = 3; } message DapScopesResponse { - repeated DapScope scopes = 1; + repeated DapScope scopes = 1; } message DapSetVariableValueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string name = 3; - string value = 4; - uint64 variables_reference = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string name = 3; + string value = 4; + uint64 variables_reference = 5; } message DapSetVariableValueResponse { - uint64 client_id = 1; - string value = 2; - optional string variable_type = 3; - optional uint64 variables_reference = 4; - optional uint64 named_variables = 5; - optional uint64 indexed_variables = 6; - optional string memory_reference = 7; + uint64 client_id = 1; + string value = 2; + optional string variable_type = 3; + optional uint64 variables_reference = 4; + optional uint64 named_variables = 5; + optional uint64 indexed_variables = 6; + optional string memory_reference = 7; } message DapPauseRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; } message DapDisconnectRequest { - uint64 project_id = 1; - uint64 client_id = 2; - optional bool restart = 3; - optional bool terminate_debuggee = 4; - optional bool suspend_debuggee = 5; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; + optional bool terminate_debuggee = 4; + optional bool suspend_debuggee = 5; } message DapTerminateThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; - repeated int64 thread_ids = 3; + uint64 project_id = 1; + uint64 client_id = 2; + repeated int64 thread_ids = 3; } message DapThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapThreadsResponse { - repeated DapThread threads = 1; + repeated DapThread threads = 1; } message DapTerminateRequest { - uint64 project_id = 1; - uint64 client_id = 2; - optional bool 
restart = 3; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; } message DapRestartRequest { - uint64 project_id = 1; - uint64 client_id = 2; - bytes raw_args = 3; + uint64 project_id = 1; + uint64 client_id = 2; + bytes raw_args = 3; } message DapRestartStackFrameRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 stack_frame_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 stack_frame_id = 3; } message ToggleIgnoreBreakpoints { - uint64 project_id = 1; - uint32 session_id = 2; + uint64 project_id = 1; + uint32 session_id = 2; } message IgnoreBreakpointState { - uint64 project_id = 1; - uint64 session_id = 2; - bool ignore = 3; + uint64 project_id = 1; + uint64 session_id = 2; + bool ignore = 3; } message DapNextRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepInRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 target_id = 4; - optional bool single_thread = 5; - optional SteppingGranularity granularity = 6; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 target_id = 4; + optional bool single_thread = 5; + optional SteppingGranularity granularity = 6; } message DapStepOutRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepBackRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional 
SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapContinueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; } message DapContinueResponse { - uint64 client_id = 1; - optional bool all_threads_continued = 2; + uint64 client_id = 1; + optional bool all_threads_continued = 2; } message DapModulesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapModulesResponse { - uint64 client_id = 1; - repeated DapModule modules = 2; + uint64 client_id = 1; + repeated DapModule modules = 2; } message DapLoadedSourcesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapLoadedSourcesResponse { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapStackTraceRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 start_frame = 4; - optional uint64 stack_trace_levels = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 start_frame = 4; + optional uint64 stack_trace_levels = 5; } message DapStackTraceResponse { - repeated DapStackFrame frames = 1; + repeated DapStackFrame frames = 1; } message DapStackFrame { - uint64 id = 1; - string name = 2; - optional DapSource source = 3; - uint64 line = 4; - uint64 column = 5; - optional uint64 end_line = 6; - optional uint64 end_column = 7; - optional bool can_restart = 8; - optional string instruction_pointer_reference = 9; - optional DapModuleId module_id = 10; - optional DapStackPresentationHint presentation_hint = 11; + uint64 
id = 1; + string name = 2; + optional DapSource source = 3; + uint64 line = 4; + uint64 column = 5; + optional uint64 end_line = 6; + optional uint64 end_column = 7; + optional bool can_restart = 8; + optional string instruction_pointer_reference = 9; + optional DapModuleId module_id = 10; + optional DapStackPresentationHint presentation_hint = 11; } message DebuggerLoadedSourceList { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapVariables { - uint64 client_id = 1; - repeated DapVariable variables = 2; + uint64 client_id = 1; + repeated DapVariable variables = 2; } // Remote Debugging: Dap Types message DapVariable { - string name = 1; - string value = 2; - optional string type = 3; - // optional DapVariablePresentationHint presentation_hint = 4; - optional string evaluate_name = 5; - uint64 variables_reference = 6; - optional uint64 named_variables = 7; - optional uint64 indexed_variables = 8; - optional string memory_reference = 9; + string name = 1; + string value = 2; + optional string type = 3; + // optional DapVariablePresentationHint presentation_hint = 4; + optional string evaluate_name = 5; + uint64 variables_reference = 6; + optional uint64 named_variables = 7; + optional uint64 indexed_variables = 8; + optional string memory_reference = 9; } message DapThread { - int64 id = 1; - string name = 2; + int64 id = 1; + string name = 2; } message DapScope { - string name = 1; - optional DapScopePresentationHint presentation_hint = 2; - uint64 variables_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - bool expensive = 6; - optional DapSource source = 7; - optional uint64 line = 8; - optional uint64 column = 9; - optional uint64 end_line = 10; - optional uint64 end_column = 11; + string name = 1; + optional DapScopePresentationHint presentation_hint = 2; + uint64 variables_reference = 3; + optional uint64 named_variables = 4; + 
optional uint64 indexed_variables = 5; + bool expensive = 6; + optional DapSource source = 7; + optional uint64 line = 8; + optional uint64 column = 9; + optional uint64 end_line = 10; + optional uint64 end_column = 11; } message DapSource { - optional string name = 1; - optional string path = 2; - optional uint64 source_reference = 3; - optional DapSourcePresentationHint presentation_hint = 4; - optional string origin = 5; - repeated DapSource sources = 6; - optional bytes adapter_data = 7; - repeated DapChecksum checksums = 8; + optional string name = 1; + optional string path = 2; + optional uint64 source_reference = 3; + optional DapSourcePresentationHint presentation_hint = 4; + optional string origin = 5; + repeated DapSource sources = 6; + optional bytes adapter_data = 7; + repeated DapChecksum checksums = 8; } enum DapOutputCategory { - ConsoleOutput = 0; - Important = 1; - Stdout = 2; - Stderr = 3; - Unknown = 4; + ConsoleOutput = 0; + Important = 1; + Stdout = 2; + Stderr = 3; + Unknown = 4; } enum DapOutputEventGroup { - Start = 0; - StartCollapsed = 1; - End = 2; + Start = 0; + StartCollapsed = 1; + End = 2; } message DapOutputEvent { - string output = 1; - optional DapOutputCategory category = 2; - optional uint64 variables_reference = 3; - optional DapOutputEventGroup group = 4; - optional DapSource source = 5; - optional uint32 line = 6; - optional uint32 column = 7; + string output = 1; + optional DapOutputCategory category = 2; + optional uint64 variables_reference = 3; + optional DapOutputEventGroup group = 4; + optional DapSource source = 5; + optional uint32 line = 6; + optional uint32 column = 7; } enum DapChecksumAlgorithm { - CHECKSUM_ALGORITHM_UNSPECIFIED = 0; - MD5 = 1; - SHA1 = 2; - SHA256 = 3; - TIMESTAMP = 4; + CHECKSUM_ALGORITHM_UNSPECIFIED = 0; + MD5 = 1; + SHA1 = 2; + SHA256 = 3; + TIMESTAMP = 4; } message DapChecksum { - DapChecksumAlgorithm algorithm = 1; - string checksum = 2; + DapChecksumAlgorithm algorithm = 1; + string checksum 
= 2; } enum DapScopePresentationHint { - Arguments = 0; - Locals = 1; - Registers = 2; - ReturnValue = 3; - ScopeUnknown = 4; + Arguments = 0; + Locals = 1; + Registers = 2; + ReturnValue = 3; + ScopeUnknown = 4; } enum DapSourcePresentationHint { - SourceNormal = 0; - Emphasize = 1; - Deemphasize = 2; - SourceUnknown = 3; + SourceNormal = 0; + Emphasize = 1; + Deemphasize = 2; + SourceUnknown = 3; } enum DapStackPresentationHint { - StackNormal = 0; - Label = 1; - Subtle = 2; - StackUnknown = 3; + StackNormal = 0; + Label = 1; + Subtle = 2; + StackUnknown = 3; } message DapModule { - DapModuleId id = 1; - string name = 2; - optional string path = 3; - optional bool is_optimized = 4; - optional bool is_user_code = 5; - optional string version = 6; - optional string symbol_status = 7; - optional string symbol_file_path = 8; - optional string date_time_stamp = 9; - optional string address_range = 10; + DapModuleId id = 1; + string name = 2; + optional string path = 3; + optional bool is_optimized = 4; + optional bool is_user_code = 5; + optional string version = 6; + optional string symbol_status = 7; + optional string symbol_file_path = 8; + optional string date_time_stamp = 9; + optional string address_range = 10; } message DebugTaskDefinition { - string adapter = 1; - string label = 2; - string config = 3; - optional TcpHost tcp_connection = 4; + string adapter = 1; + string label = 2; + string config = 3; + optional TcpHost tcp_connection = 4; } message TcpHost { - optional uint32 port = 1; - optional string host = 2; - optional uint64 timeout = 3; + optional uint32 port = 1; + optional string host = 2; + optional uint64 timeout = 3; } message DebugLaunchRequest { - string program = 1; - optional string cwd = 2; - repeated string args = 3; - map env = 4; + string program = 1; + optional string cwd = 2; + repeated string args = 3; + map env = 4; } message DebugAttachRequest { - uint32 process_id = 1; + uint32 process_id = 1; } message DapModuleId { - oneof id { - 
uint32 number = 1; - string string = 2; - } + oneof id { + uint32 number = 1; + string string = 2; + } } message GetDebugAdapterBinary { - uint64 project_id = 1; - uint64 session_id = 3; - DebugTaskDefinition definition = 2; - uint64 worktree_id = 4; + uint64 project_id = 1; + uint64 session_id = 3; + DebugTaskDefinition definition = 2; + uint64 worktree_id = 4; } message DebugAdapterBinary { - optional string command = 1; - repeated string arguments = 2; - map envs = 3; - optional string cwd = 4; - optional TcpHost connection = 5; - string configuration = 7; - LaunchType launch_type = 8; - enum LaunchType { - Attach = 0; - Launch = 1; - } + optional string command = 1; + repeated string arguments = 2; + map envs = 3; + optional string cwd = 4; + optional TcpHost connection = 5; + string configuration = 7; + LaunchType launch_type = 8; + enum LaunchType { + Attach = 0; + Launch = 1; + } } message RunDebugLocators { - uint64 project_id = 1; - SpawnInTerminal build_command = 2; - string locator = 3; + uint64 project_id = 1; + SpawnInTerminal build_command = 2; + string locator = 3; } message DebugRequest { - oneof request { - DebugLaunchRequest debug_launch_request = 1; - DebugAttachRequest debug_attach_request = 2; - } + oneof request { + DebugLaunchRequest debug_launch_request = 1; + DebugAttachRequest debug_attach_request = 2; + } } message DebugScenario { - string label = 1; - string adapter = 2; - reserved 3; - DebugRequest request = 4; - optional TcpHost connection = 5; - optional bool stop_on_entry = 6; - optional string configuration = 7; + string label = 1; + string adapter = 2; + reserved 3; + DebugRequest request = 4; + optional TcpHost connection = 5; + optional bool stop_on_entry = 6; + optional string configuration = 7; } message LogToDebugConsole { - uint64 project_id = 1; - uint64 session_id = 2; - string message = 3; + uint64 project_id = 1; + uint64 session_id = 2; + string message = 3; } message GetProcesses { - uint64 project_id = 1; + uint64 
project_id = 1; } message GetProcessesResponse { - repeated ProcessInfo processes = 1; + repeated ProcessInfo processes = 1; } message ProcessInfo { - uint32 pid = 1; - string name = 2; - repeated string command = 3; + uint32 pid = 1; + string name = 2; + repeated string command = 3; } diff --git a/crates/proto/proto/download.proto b/crates/proto/proto/download.proto index fd1d63e78db581866981cb90372f84716be8a958..44b1da3389abc2996e2fb9acf6e42d2b3ae54f44 100644 --- a/crates/proto/proto/download.proto +++ b/crates/proto/proto/download.proto @@ -5,32 +5,32 @@ import "core.proto"; import "worktree.proto"; message DownloadFileByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; - uint64 file_id = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; + uint64 file_id = 4; } message DownloadFileResponse { - uint64 file_id = 1; + uint64 file_id = 1; } message CreateFileForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - FileState state = 3; - FileChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + FileState state = 3; + FileChunk chunk = 4; + } } message FileState { - uint64 id = 1; - optional File file = 2; - uint64 content_size = 3; + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; } message FileChunk { - uint64 file_id = 1; - bytes data = 2; + uint64 file_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 994d319913c6d84c2e639ccd78bade4547449a7a..736abcdaa49f62d72582750a8a28ea785baee282 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -1,254 +1,254 @@ syntax = "proto3"; package zed.messages; -import "worktree.proto"; import "buffer.proto"; +import "worktree.proto"; message GitBranchesResponse { - repeated Branch branches = 1; + repeated Branch branches = 1; } message UpdateDiffBases { - uint64 project_id = 1; - uint64 buffer_id = 2; - - enum Mode { - // No 
collaborator is using the unstaged diff. - HEAD_ONLY = 0; - // No collaborator is using the diff from HEAD. - INDEX_ONLY = 1; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD are the same for this path. - INDEX_MATCHES_HEAD = 2; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD differ for this path, - // where None means the path doesn't exist in that state of the repo. - INDEX_AND_HEAD = 3; - } - - optional string staged_text = 3; - optional string committed_text = 4; - Mode mode = 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + + enum Mode { + // No collaborator is using the unstaged diff. + HEAD_ONLY = 0; + // No collaborator is using the diff from HEAD. + INDEX_ONLY = 1; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD are the same for this path. + INDEX_MATCHES_HEAD = 2; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD differ for this path, + // where None means the path doesn't exist in that state of the repo. 
+ INDEX_AND_HEAD = 3; + } + + optional string staged_text = 3; + optional string committed_text = 4; + Mode mode = 5; } message OpenUnstagedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUnstagedDiffResponse { - optional string staged_text = 1; + optional string staged_text = 1; } message OpenUncommittedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUncommittedDiffResponse { - enum Mode { - INDEX_MATCHES_HEAD = 0; - INDEX_AND_HEAD = 1; - } - optional string staged_text = 1; - optional string committed_text = 2; - Mode mode = 3; + enum Mode { + INDEX_MATCHES_HEAD = 0; + INDEX_AND_HEAD = 1; + } + optional string staged_text = 1; + optional string committed_text = 2; + Mode mode = 3; } message SetIndexText { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - optional string text = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + optional string text = 5; } message GetPermalinkToLine { - uint64 project_id = 1; - uint64 buffer_id = 2; - Range selection = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + Range selection = 3; } message GetPermalinkToLineResponse { - string permalink = 1; + string permalink = 1; } message Branch { - bool is_head = 1; - string ref_name = 2; - optional uint64 unix_timestamp = 3; - optional GitUpstream upstream = 4; - optional CommitSummary most_recent_commit = 5; + bool is_head = 1; + string ref_name = 2; + optional uint64 unix_timestamp = 3; + optional GitUpstream upstream = 4; + optional CommitSummary most_recent_commit = 5; } message GitUpstream { - string ref_name = 1; - optional UpstreamTracking tracking = 2; + string ref_name = 1; + optional UpstreamTracking tracking = 2; } message UpstreamTracking { - uint64 ahead = 1; - uint64 behind = 2; + uint64 ahead = 1; + uint64 behind = 2; } message CommitSummary { - 
string sha = 1; - string subject = 2; - int64 commit_timestamp = 3; - string author_name = 4; + string sha = 1; + string subject = 2; + int64 commit_timestamp = 3; + string author_name = 4; } message GitBranches { - uint64 project_id = 1; - ProjectPath repository = 2; + uint64 project_id = 1; + ProjectPath repository = 2; } - message UpdateGitBranch { - uint64 project_id = 1; - string branch_name = 2; - ProjectPath repository = 3; + uint64 project_id = 1; + string branch_name = 2; + ProjectPath repository = 3; } message UpdateRepository { - uint64 project_id = 1; - uint64 id = 2; - string abs_path = 3; - repeated uint64 entry_ids = 4; - optional Branch branch_summary = 5; - repeated StatusEntry updated_statuses = 6; - repeated string removed_statuses = 7; - repeated string current_merge_conflicts = 8; - uint64 scan_id = 9; - bool is_last_update = 10; - optional GitCommitDetails head_commit_details = 11; - optional string merge_message = 12; - repeated StashEntry stash_entries = 13; - optional string remote_upstream_url = 14; - optional string remote_origin_url = 15; + uint64 project_id = 1; + uint64 id = 2; + string abs_path = 3; + repeated uint64 entry_ids = 4; + optional Branch branch_summary = 5; + repeated StatusEntry updated_statuses = 6; + repeated string removed_statuses = 7; + repeated string current_merge_conflicts = 8; + uint64 scan_id = 9; + bool is_last_update = 10; + optional GitCommitDetails head_commit_details = 11; + optional string merge_message = 12; + repeated StashEntry stash_entries = 13; + optional string remote_upstream_url = 14; + optional string remote_origin_url = 15; + optional string original_repo_abs_path = 16; } message RemoveRepository { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } enum GitStatus { - Added = 0; - Modified = 1; - Conflict = 2; - Deleted = 3; - Updated = 4; - TypeChanged = 5; - Renamed = 6; - Copied = 7; - Unmodified = 8; + Added = 0; + Modified = 1; + Conflict = 2; + Deleted = 
3; + Updated = 4; + TypeChanged = 5; + Renamed = 6; + Copied = 7; + Unmodified = 8; } message GitFileStatus { - oneof variant { - Untracked untracked = 1; - Ignored ignored = 2; - Unmerged unmerged = 3; - Tracked tracked = 4; - } - - message Untracked {} - message Ignored {} - message Unmerged { - GitStatus first_head = 1; - GitStatus second_head = 2; - } - message Tracked { - GitStatus index_status = 1; - GitStatus worktree_status = 2; - } + oneof variant { + Untracked untracked = 1; + Ignored ignored = 2; + Unmerged unmerged = 3; + Tracked tracked = 4; + } + + message Untracked {} + message Ignored {} + message Unmerged { + GitStatus first_head = 1; + GitStatus second_head = 2; + } + message Tracked { + GitStatus index_status = 1; + GitStatus worktree_status = 2; + } } message GitGetBranches { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message GitCreateBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitChangeBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitRenameBranch { - uint64 project_id = 1; - uint64 repository_id = 2; - string branch = 3; - string new_name = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch = 3; + string new_name = 4; } message GitCreateRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; - string remote_url = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string remote_name = 3; + string remote_url = 4; } message GitRemoveRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string 
remote_name = 3; } message GitDeleteBranch { - uint64 project_id = 1; - uint64 repository_id = 2; - string branch_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch_name = 3; } message GitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - DiffType diff_type = 4; - optional string merge_base_ref = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + DiffType diff_type = 4; + optional string merge_base_ref = 5; - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } + enum DiffType { + HEAD_TO_WORKTREE = 0; + HEAD_TO_INDEX = 1; + MERGE_BASE = 2; + } } message GitDiffResponse { - string diff = 1; + string diff = 1; } message GitInit { - uint64 project_id = 1; - string abs_path = 2; - string fallback_branch_name = 3; + uint64 project_id = 1; + string abs_path = 2; + string fallback_branch_name = 3; } message GitClone { - uint64 project_id = 1; - string abs_path = 2; - string remote_repo = 3; + uint64 project_id = 1; + string abs_path = 2; + string remote_repo = 3; } message GitCloneResponse { - bool success = 1; + bool success = 1; } message CheckForPushedCommits { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message CheckForPushedCommitsResponse { @@ -256,338 +256,340 @@ message CheckForPushedCommitsResponse { } message GitShow { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message GitCommitDetails { - string sha = 1; - string message = 2; - int64 commit_timestamp = 3; - string author_email = 4; - string author_name = 5; + string sha = 1; + string message = 2; + int64 commit_timestamp = 3; + string author_email = 4; + string author_name = 5; } message LoadCommitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit 
= 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message LoadCommitDiffResponse { - repeated CommitFile files = 1; + repeated CommitFile files = 1; } message CommitFile { - string path = 1; - optional string old_text = 2; - optional string new_text = 3; - bool is_binary = 4; + string path = 1; + optional string old_text = 2; + optional string new_text = 3; + bool is_binary = 4; } message GitReset { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; - ResetMode mode = 5; - enum ResetMode { - SOFT = 0; - MIXED = 1; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + ResetMode mode = 5; + enum ResetMode { + SOFT = 0; + MIXED = 1; + } } message GitCheckoutFiles { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; - repeated string paths = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + repeated string paths = 5; } message GitFileHistory { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - uint64 skip = 5; - optional uint64 limit = 6; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + uint64 skip = 5; + optional uint64 limit = 6; } message GitFileHistoryResponse { - repeated FileHistoryEntry entries = 1; - string path = 2; + repeated FileHistoryEntry entries = 1; + string path = 2; } message FileHistoryEntry { - string sha = 1; - string subject = 2; - string message = 3; - int64 commit_timestamp = 4; - string author_name = 5; - string author_email = 6; + string sha = 1; + string subject = 2; + string message = 3; + int64 commit_timestamp = 4; + string author_name = 5; + string author_email = 6; } // Move to `git.proto` once collab's min version is >=0.171.0. message StatusEntry { - string repo_path = 1; - // Can be removed once collab's min version is >=0.171.0. 
- GitStatus simple_status = 2; - GitFileStatus status = 3; + string repo_path = 1; + // Can be removed once collab's min version is >=0.171.0. + GitStatus simple_status = 2; + GitFileStatus status = 3; + optional uint32 diff_stat_added = 4; + optional uint32 diff_stat_deleted = 5; } message StashEntry { - bytes oid = 1; - string message = 2; - optional string branch = 3; - uint64 index = 4; - int64 timestamp = 5; + bytes oid = 1; + string message = 2; + optional string branch = 3; + uint64 index = 4; + int64 timestamp = 5; } message Stage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Unstage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Stash { - uint64 project_id = 1; - uint64 repository_id = 2; - repeated string paths = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + repeated string paths = 3; } message StashPop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashApply { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashDrop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message Commit { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string name = 4; - optional string email = 5; - string message = 6; - optional CommitOptions options = 7; - reserved 8; - uint64 askpass_id = 9; - - message CommitOptions { - 
bool amend = 1; - bool signoff = 2; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string name = 4; + optional string email = 5; + string message = 6; + optional CommitOptions options = 7; + reserved 8; + uint64 askpass_id = 9; + + message CommitOptions { + bool amend = 1; + bool signoff = 2; + } } message OpenCommitMessageBuffer { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message Push { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - string branch_name = 5; - optional PushOptions options = 6; - uint64 askpass_id = 7; - string remote_branch_name = 8; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string remote_name = 4; + string branch_name = 5; + optional PushOptions options = 6; + uint64 askpass_id = 7; + string remote_branch_name = 8; - enum PushOptions { - SET_UPSTREAM = 0; - FORCE = 1; - } + enum PushOptions { + SET_UPSTREAM = 0; + FORCE = 1; + } } message Fetch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - optional string remote = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + optional string remote = 5; } message GetRemotes { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string branch_name = 4; - bool is_push = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string branch_name = 4; + bool is_push = 5; } message GetRemotesResponse { - repeated Remote remotes = 1; + repeated Remote remotes = 1; - message Remote { - string name = 1; - } + message Remote { + string name = 1; + } } message Pull { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - optional string branch_name = 5; - uint64 askpass_id = 6; - bool rebase = 7; + uint64 project_id = 1; + reserved 2; + 
uint64 repository_id = 3; + string remote_name = 4; + optional string branch_name = 5; + uint64 askpass_id = 6; + bool rebase = 7; } message RemoteMessageResponse { - string stdout = 1; - string stderr = 2; + string stdout = 1; + string stderr = 2; } message BlameBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; } message BlameEntry { - bytes sha = 1; + bytes sha = 1; - uint32 start_line = 2; - uint32 end_line = 3; - uint32 original_line_number = 4; + uint32 start_line = 2; + uint32 end_line = 3; + uint32 original_line_number = 4; - optional string author = 5; - optional string author_mail = 6; - optional int64 author_time = 7; - optional string author_tz = 8; + optional string author = 5; + optional string author_mail = 6; + optional int64 author_time = 7; + optional string author_tz = 8; - optional string committer = 9; - optional string committer_mail = 10; - optional int64 committer_time = 11; - optional string committer_tz = 12; + optional string committer = 9; + optional string committer_mail = 10; + optional int64 committer_time = 11; + optional string committer_tz = 12; - optional string summary = 13; - optional string previous = 14; + optional string summary = 13; + optional string previous = 14; - string filename = 15; + string filename = 15; } message CommitMessage { - bytes oid = 1; - string message = 2; + bytes oid = 1; + string message = 2; } message CommitPermalink { - bytes oid = 1; - string permalink = 2; + bytes oid = 1; + string permalink = 2; } message BlameBufferResponse { - message BlameResponse { - repeated BlameEntry entries = 1; - repeated CommitMessage messages = 2; - reserved 3; - reserved 4; - } + message BlameResponse { + repeated BlameEntry entries = 1; + repeated CommitMessage messages = 2; + reserved 3; + reserved 4; + } - optional BlameResponse blame_response = 5; + optional BlameResponse 
blame_response = 5; - reserved 1 to 4; + reserved 1 to 4; } message GetDefaultBranch { - uint64 project_id = 1; - uint64 repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; } message GetDefaultBranchResponse { - optional string branch = 1; + optional string branch = 1; } message GetTreeDiff { - uint64 project_id = 1; - uint64 repository_id = 2; - bool is_merge = 3; - string base = 4; - string head = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + bool is_merge = 3; + string base = 4; + string head = 5; } message GetTreeDiffResponse { - repeated TreeDiffStatus entries = 1; + repeated TreeDiffStatus entries = 1; } message TreeDiffStatus { - enum Status { - ADDED = 0; - MODIFIED = 1; - DELETED = 2; - } + enum Status { + ADDED = 0; + MODIFIED = 1; + DELETED = 2; + } - Status status = 1; - string path = 2; - optional string oid = 3; + Status status = 1; + string path = 2; + optional string oid = 3; } message GetBlobContent { - uint64 project_id = 1; - uint64 repository_id = 2; - string oid =3; + uint64 project_id = 1; + uint64 repository_id = 2; + string oid = 3; } message GetBlobContentResponse { - string content = 1; + string content = 1; } message GitGetWorktrees { - uint64 project_id = 1; - uint64 repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; } message GitWorktreesResponse { - repeated Worktree worktrees = 1; + repeated Worktree worktrees = 1; } message Worktree { - string path = 1; - string ref_name = 2; - string sha = 3; + string path = 1; + string ref_name = 2; + string sha = 3; } message GitCreateWorktree { - uint64 project_id = 1; - uint64 repository_id = 2; - string name = 3; - string directory = 4; - optional string commit = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + string name = 3; + string directory = 4; + optional string commit = 5; } message RunGitHook { - enum GitHook { - PRE_COMMIT = 0; - reserved 1; - } - - uint64 project_id = 1; - uint64 repository_id = 2; - GitHook hook = 3; + 
enum GitHook { + PRE_COMMIT = 0; + reserved 1; + } + + uint64 project_id = 1; + uint64 repository_id = 2; + GitHook hook = 3; } diff --git a/crates/proto/proto/image.proto b/crates/proto/proto/image.proto index e3232e6847cbc719280bc3ccd5254e5e368dbeb6..ff791e1f87b6089e6e87ec746fad173b180f10ef 100644 --- a/crates/proto/proto/image.proto +++ b/crates/proto/proto/image.proto @@ -5,32 +5,32 @@ import "core.proto"; import "worktree.proto"; message OpenImageByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenImageResponse { - uint64 image_id = 1; + uint64 image_id = 1; } message CreateImageForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - ImageState state = 3; - ImageChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + ImageState state = 3; + ImageChunk chunk = 4; + } } message ImageState { - uint64 id = 1; - optional File file = 2; - uint64 content_size = 3; - string format = 4; // e.g., "png", "jpeg", "webp", etc. + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; + string format = 4; // e.g., "png", "jpeg", "webp", etc. 
} message ImageChunk { - uint64 image_id = 1; - bytes data = 2; + uint64 image_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 9132dafbd42be8e1f7d0de2b1278d7bf757aa9ac..226373a111b6e29e4731edd638a5317dcd244273 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -2,8 +2,6 @@ syntax = "proto3"; package zed.messages; import "buffer.proto"; -import "core.proto"; -import "worktree.proto"; message GetDefinition { uint64 project_id = 1; diff --git a/crates/proto/proto/notification.proto b/crates/proto/proto/notification.proto index ebd3d7fe447991c38c9d616fc944f366f51782c0..8a41854ac161100c60d66d0b27b49bc4b2182a22 100644 --- a/crates/proto/proto/notification.proto +++ b/crates/proto/proto/notification.proto @@ -2,36 +2,36 @@ syntax = "proto3"; package zed.messages; message GetNotifications { - optional uint64 before_id = 1; + optional uint64 before_id = 1; } message AddNotification { - Notification notification = 1; + Notification notification = 1; } message GetNotificationsResponse { - repeated Notification notifications = 1; - bool done = 2; + repeated Notification notifications = 1; + bool done = 2; } message DeleteNotification { - uint64 notification_id = 1; + uint64 notification_id = 1; } message UpdateNotification { - Notification notification = 1; + Notification notification = 1; } message MarkNotificationRead { - uint64 notification_id = 1; + uint64 notification_id = 1; } message Notification { - uint64 id = 1; - uint64 timestamp = 2; - string kind = 3; - optional uint64 entity_id = 4; - string content = 5; - bool is_read = 6; - optional bool response = 7; + uint64 id = 1; + uint64 timestamp = 2; + string kind = 3; + optional uint64 entity_id = 4; + string content = 5; + bool is_read = 6; + optional bool response = 7; } diff --git a/crates/proto/proto/task.proto b/crates/proto/proto/task.proto index 
1844087d623cc3eac0e5d7500a50dfb31028f304..8d941c2438c55045d8d38cb4c97d918be8abbeb4 100644 --- a/crates/proto/proto/task.proto +++ b/crates/proto/proto/task.proto @@ -4,57 +4,57 @@ package zed.messages; import "buffer.proto"; message TaskContextForLocation { - uint64 project_id = 1; - Location location = 2; - map task_variables = 3; + uint64 project_id = 1; + Location location = 2; + map task_variables = 3; } message TaskContext { - optional string cwd = 1; - map task_variables = 2; - map project_env = 3; + optional string cwd = 1; + map task_variables = 2; + map project_env = 3; } message Shell { - message WithArguments { - string program = 1; - repeated string args = 2; - } + message WithArguments { + string program = 1; + repeated string args = 2; + } - oneof shell_type { - System system = 1; - string program = 2; - WithArguments with_arguments = 3; - } + oneof shell_type { + System system = 1; + string program = 2; + WithArguments with_arguments = 3; + } } message System {} enum RevealStrategy { - RevealAlways = 0; - RevealNever = 1; + RevealAlways = 0; + RevealNever = 1; } enum HideStrategy { - HideAlways = 0; - HideNever = 1; - HideOnSuccess = 2; + HideAlways = 0; + HideNever = 1; + HideOnSuccess = 2; } message SpawnInTerminal { - string label = 1; - optional string command = 2; - repeated string args = 3; - map env = 4; - optional string cwd = 5; + string label = 1; + optional string command = 2; + repeated string args = 3; + map env = 4; + optional string cwd = 5; } message GetDirectoryEnvironment { - uint64 project_id = 1; - Shell shell = 2; - string directory = 3; + uint64 project_id = 1; + Shell shell = 2; + string directory = 3; } message DirectoryEnvironment { - map environment = 1; + map environment = 1; } diff --git a/crates/proto/proto/toolchain.proto b/crates/proto/proto/toolchain.proto index b190322ca0602078ea28d00fe970e4958fb17fb0..a91948148e64eb9eff7f1ca657dab203a9ca7f1f 100644 --- a/crates/proto/proto/toolchain.proto +++ 
b/crates/proto/proto/toolchain.proto @@ -2,58 +2,58 @@ syntax = "proto3"; package zed.messages; message ListToolchains { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message Toolchain { - string name = 1; - string path = 2; - string raw_json = 3; + string name = 1; + string path = 2; + string raw_json = 3; } message ToolchainGroup { - uint64 start_index = 1; - string name = 2; + uint64 start_index = 1; + string name = 2; } message ListToolchainsResponse { - repeated Toolchain toolchains = 1; - bool has_values = 2; - repeated ToolchainGroup groups = 3; - optional string relative_worktree_path = 4; + repeated Toolchain toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; + optional string relative_worktree_path = 4; } message ActivateToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - Toolchain toolchain = 3; - string language_name = 4; - optional string path = 5; + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; + optional string path = 5; } message ActiveToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message ActiveToolchainResponse { - optional Toolchain toolchain = 1; + optional Toolchain toolchain = 1; } message ResolveToolchain { - uint64 project_id = 1; - string abs_path = 2; - string language_name = 3; + uint64 project_id = 1; + string abs_path = 2; + string language_name = 3; } message ResolveToolchainResponse { - oneof response { - Toolchain toolchain = 1; - string error = 2; - } + oneof response { + Toolchain toolchain = 1; + string error = 2; + } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto 
index 387ed25027230c7e407983ff5c098ae24bbecc9e..c129b6eff26404b66b38439c29f0b83289b37172 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -18,493 +18,492 @@ import "toolchain.proto"; import "worktree.proto"; // Looking for a number? Search "// current max" - message Envelope { - uint32 id = 1; - optional uint32 responding_to = 2; - optional PeerId original_sender_id = 3; - optional uint32 ack_id = 266; - - oneof payload { - Hello hello = 4; - Ack ack = 5; - Error error = 6; - Ping ping = 7; - Test test = 8; - EndStream end_stream = 165; - - CreateRoom create_room = 9; - CreateRoomResponse create_room_response = 10; - JoinRoom join_room = 11; - JoinRoomResponse join_room_response = 12; - RejoinRoom rejoin_room = 13; - RejoinRoomResponse rejoin_room_response = 14; - LeaveRoom leave_room = 15; - Call call = 16; - IncomingCall incoming_call = 17; - CallCanceled call_canceled = 18; - CancelCall cancel_call = 19; - DeclineCall decline_call = 20; - UpdateParticipantLocation update_participant_location = 21; - RoomUpdated room_updated = 22; - - ShareProject share_project = 23; - ShareProjectResponse share_project_response = 24; - UnshareProject unshare_project = 25; - JoinProject join_project = 26; - JoinProjectResponse join_project_response = 27; - LeaveProject leave_project = 28; - AddProjectCollaborator add_project_collaborator = 29; - UpdateProjectCollaborator update_project_collaborator = 30; - RemoveProjectCollaborator remove_project_collaborator = 31; - - GetDefinition get_definition = 32; - GetDefinitionResponse get_definition_response = 33; - GetDeclaration get_declaration = 237; - GetDeclarationResponse get_declaration_response = 238; - GetTypeDefinition get_type_definition = 34; - GetTypeDefinitionResponse get_type_definition_response = 35; - - GetReferences get_references = 36; - GetReferencesResponse get_references_response = 37; - GetDocumentHighlights get_document_highlights = 38; - GetDocumentHighlightsResponse 
get_document_highlights_response = 39; - GetProjectSymbols get_project_symbols = 40; - GetProjectSymbolsResponse get_project_symbols_response = 41; - OpenBufferForSymbol open_buffer_for_symbol = 42; - OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; - - UpdateProject update_project = 44; - UpdateWorktree update_worktree = 45; - - CreateProjectEntry create_project_entry = 46; - RenameProjectEntry rename_project_entry = 47; - CopyProjectEntry copy_project_entry = 48; - DeleteProjectEntry delete_project_entry = 49; - ProjectEntryResponse project_entry_response = 50; - ExpandProjectEntry expand_project_entry = 51; - ExpandProjectEntryResponse expand_project_entry_response = 52; - ExpandAllForProjectEntry expand_all_for_project_entry = 291; - ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; - UpdateDiagnosticSummary update_diagnostic_summary = 53; - StartLanguageServer start_language_server = 54; - UpdateLanguageServer update_language_server = 55; - - OpenBufferById open_buffer_by_id = 56; - OpenBufferByPath open_buffer_by_path = 57; - OpenBufferResponse open_buffer_response = 58; - CreateBufferForPeer create_buffer_for_peer = 59; - UpdateBuffer update_buffer = 60; - UpdateBufferFile update_buffer_file = 61; - SaveBuffer save_buffer = 62; - BufferSaved buffer_saved = 63; - BufferReloaded buffer_reloaded = 64; - ReloadBuffers reload_buffers = 65; - ReloadBuffersResponse reload_buffers_response = 66; - SynchronizeBuffers synchronize_buffers = 67; - SynchronizeBuffersResponse synchronize_buffers_response = 68; - FormatBuffers format_buffers = 69; - FormatBuffersResponse format_buffers_response = 70; - GetCompletions get_completions = 71; - GetCompletionsResponse get_completions_response = 72; - ResolveCompletionDocumentation resolve_completion_documentation = 73; - ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; - ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; - 
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76; - GetCodeActions get_code_actions = 77; - GetCodeActionsResponse get_code_actions_response = 78; - GetHover get_hover = 79; - GetHoverResponse get_hover_response = 80; - ApplyCodeAction apply_code_action = 81; - ApplyCodeActionResponse apply_code_action_response = 82; - PrepareRename prepare_rename = 83; - PrepareRenameResponse prepare_rename_response = 84; - PerformRename perform_rename = 85; - PerformRenameResponse perform_rename_response = 86; - - UpdateContacts update_contacts = 89; - ShowContacts show_contacts = 91; - - GetUsers get_users = 92; - FuzzySearchUsers fuzzy_search_users = 93; - UsersResponse users_response = 94; - RequestContact request_contact = 95; - RespondToContactRequest respond_to_contact_request = 96; - RemoveContact remove_contact = 97; - - Follow follow = 98; - FollowResponse follow_response = 99; - UpdateFollowers update_followers = 100; - Unfollow unfollow = 101; - UpdateDiffBases update_diff_bases = 104; - - OnTypeFormatting on_type_formatting = 105; - OnTypeFormattingResponse on_type_formatting_response = 106; - - UpdateWorktreeSettings update_worktree_settings = 107; - - InlayHints inlay_hints = 108; - InlayHintsResponse inlay_hints_response = 109; - ResolveInlayHint resolve_inlay_hint = 110; - ResolveInlayHintResponse resolve_inlay_hint_response = 111; - RefreshInlayHints refresh_inlay_hints = 112; - - CreateChannel create_channel = 113; - CreateChannelResponse create_channel_response = 114; - InviteChannelMember invite_channel_member = 115; - RemoveChannelMember remove_channel_member = 116; - RespondToChannelInvite respond_to_channel_invite = 117; - UpdateChannels update_channels = 118; - JoinChannel join_channel = 119; - DeleteChannel delete_channel = 120; - GetChannelMembers get_channel_members = 121; - GetChannelMembersResponse get_channel_members_response = 122; - SetChannelMemberRole set_channel_member_role = 123; - RenameChannel 
rename_channel = 124; - RenameChannelResponse rename_channel_response = 125; - SubscribeToChannels subscribe_to_channels = 207; - - JoinChannelBuffer join_channel_buffer = 126; - JoinChannelBufferResponse join_channel_buffer_response = 127; - UpdateChannelBuffer update_channel_buffer = 128; - LeaveChannelBuffer leave_channel_buffer = 129; - UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; - RejoinChannelBuffers rejoin_channel_buffers = 131; - RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; - AckBufferOperation ack_buffer_operation = 133; - - JoinChannelChat join_channel_chat = 134; - JoinChannelChatResponse join_channel_chat_response = 135; - LeaveChannelChat leave_channel_chat = 136; - SendChannelMessage send_channel_message = 137; - SendChannelMessageResponse send_channel_message_response = 138; - ChannelMessageSent channel_message_sent = 139; - GetChannelMessages get_channel_messages = 140; - GetChannelMessagesResponse get_channel_messages_response = 141; - RemoveChannelMessage remove_channel_message = 142; - AckChannelMessage ack_channel_message = 143; - GetChannelMessagesById get_channel_messages_by_id = 144; - - MoveChannel move_channel = 147; - ReorderChannel reorder_channel = 349; - SetChannelVisibility set_channel_visibility = 148; - - AddNotification add_notification = 149; - GetNotifications get_notifications = 150; - GetNotificationsResponse get_notifications_response = 151; - DeleteNotification delete_notification = 152; - MarkNotificationRead mark_notification_read = 153; - LspExtExpandMacro lsp_ext_expand_macro = 154; - LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155; - SetRoomParticipantRole set_room_participant_role = 156; - - UpdateUserChannels update_user_channels = 157; - - GetImplementation get_implementation = 162; - GetImplementationResponse get_implementation_response = 163; - - UpdateChannelMessage update_channel_message = 170; - ChannelMessageUpdate channel_message_update = 171; 
- - BlameBuffer blame_buffer = 172; - BlameBufferResponse blame_buffer_response = 173; - - UpdateNotification update_notification = 174; - - RestartLanguageServers restart_language_servers = 208; - - RejoinRemoteProjects rejoin_remote_projects = 186; - RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; + uint32 id = 1; + optional uint32 responding_to = 2; + optional PeerId original_sender_id = 3; + optional uint32 ack_id = 266; + + oneof payload { + Hello hello = 4; + Ack ack = 5; + Error error = 6; + Ping ping = 7; + Test test = 8; + EndStream end_stream = 165; + + CreateRoom create_room = 9; + CreateRoomResponse create_room_response = 10; + JoinRoom join_room = 11; + JoinRoomResponse join_room_response = 12; + RejoinRoom rejoin_room = 13; + RejoinRoomResponse rejoin_room_response = 14; + LeaveRoom leave_room = 15; + Call call = 16; + IncomingCall incoming_call = 17; + CallCanceled call_canceled = 18; + CancelCall cancel_call = 19; + DeclineCall decline_call = 20; + UpdateParticipantLocation update_participant_location = 21; + RoomUpdated room_updated = 22; + + ShareProject share_project = 23; + ShareProjectResponse share_project_response = 24; + UnshareProject unshare_project = 25; + JoinProject join_project = 26; + JoinProjectResponse join_project_response = 27; + LeaveProject leave_project = 28; + AddProjectCollaborator add_project_collaborator = 29; + UpdateProjectCollaborator update_project_collaborator = 30; + RemoveProjectCollaborator remove_project_collaborator = 31; + + GetDefinition get_definition = 32; + GetDefinitionResponse get_definition_response = 33; + GetDeclaration get_declaration = 237; + GetDeclarationResponse get_declaration_response = 238; + GetTypeDefinition get_type_definition = 34; + GetTypeDefinitionResponse get_type_definition_response = 35; + + GetReferences get_references = 36; + GetReferencesResponse get_references_response = 37; + GetDocumentHighlights get_document_highlights = 38; + GetDocumentHighlightsResponse 
get_document_highlights_response = 39; + GetProjectSymbols get_project_symbols = 40; + GetProjectSymbolsResponse get_project_symbols_response = 41; + OpenBufferForSymbol open_buffer_for_symbol = 42; + OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; + + UpdateProject update_project = 44; + UpdateWorktree update_worktree = 45; + + CreateProjectEntry create_project_entry = 46; + RenameProjectEntry rename_project_entry = 47; + CopyProjectEntry copy_project_entry = 48; + DeleteProjectEntry delete_project_entry = 49; + ProjectEntryResponse project_entry_response = 50; + ExpandProjectEntry expand_project_entry = 51; + ExpandProjectEntryResponse expand_project_entry_response = 52; + ExpandAllForProjectEntry expand_all_for_project_entry = 291; + ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; + UpdateDiagnosticSummary update_diagnostic_summary = 53; + StartLanguageServer start_language_server = 54; + UpdateLanguageServer update_language_server = 55; + + OpenBufferById open_buffer_by_id = 56; + OpenBufferByPath open_buffer_by_path = 57; + OpenBufferResponse open_buffer_response = 58; + CreateBufferForPeer create_buffer_for_peer = 59; + UpdateBuffer update_buffer = 60; + UpdateBufferFile update_buffer_file = 61; + SaveBuffer save_buffer = 62; + BufferSaved buffer_saved = 63; + BufferReloaded buffer_reloaded = 64; + ReloadBuffers reload_buffers = 65; + ReloadBuffersResponse reload_buffers_response = 66; + SynchronizeBuffers synchronize_buffers = 67; + SynchronizeBuffersResponse synchronize_buffers_response = 68; + FormatBuffers format_buffers = 69; + FormatBuffersResponse format_buffers_response = 70; + GetCompletions get_completions = 71; + GetCompletionsResponse get_completions_response = 72; + ResolveCompletionDocumentation resolve_completion_documentation = 73; + ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; + ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; + 
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76; + GetCodeActions get_code_actions = 77; + GetCodeActionsResponse get_code_actions_response = 78; + GetHover get_hover = 79; + GetHoverResponse get_hover_response = 80; + ApplyCodeAction apply_code_action = 81; + ApplyCodeActionResponse apply_code_action_response = 82; + PrepareRename prepare_rename = 83; + PrepareRenameResponse prepare_rename_response = 84; + PerformRename perform_rename = 85; + PerformRenameResponse perform_rename_response = 86; + + UpdateContacts update_contacts = 89; + ShowContacts show_contacts = 91; + + GetUsers get_users = 92; + FuzzySearchUsers fuzzy_search_users = 93; + UsersResponse users_response = 94; + RequestContact request_contact = 95; + RespondToContactRequest respond_to_contact_request = 96; + RemoveContact remove_contact = 97; + + Follow follow = 98; + FollowResponse follow_response = 99; + UpdateFollowers update_followers = 100; + Unfollow unfollow = 101; + UpdateDiffBases update_diff_bases = 104; + + OnTypeFormatting on_type_formatting = 105; + OnTypeFormattingResponse on_type_formatting_response = 106; + + UpdateWorktreeSettings update_worktree_settings = 107; + + InlayHints inlay_hints = 108; + InlayHintsResponse inlay_hints_response = 109; + ResolveInlayHint resolve_inlay_hint = 110; + ResolveInlayHintResponse resolve_inlay_hint_response = 111; + RefreshInlayHints refresh_inlay_hints = 112; + + CreateChannel create_channel = 113; + CreateChannelResponse create_channel_response = 114; + InviteChannelMember invite_channel_member = 115; + RemoveChannelMember remove_channel_member = 116; + RespondToChannelInvite respond_to_channel_invite = 117; + UpdateChannels update_channels = 118; + JoinChannel join_channel = 119; + DeleteChannel delete_channel = 120; + GetChannelMembers get_channel_members = 121; + GetChannelMembersResponse get_channel_members_response = 122; + SetChannelMemberRole set_channel_member_role = 123; + RenameChannel 
rename_channel = 124; + RenameChannelResponse rename_channel_response = 125; + SubscribeToChannels subscribe_to_channels = 207; + + JoinChannelBuffer join_channel_buffer = 126; + JoinChannelBufferResponse join_channel_buffer_response = 127; + UpdateChannelBuffer update_channel_buffer = 128; + LeaveChannelBuffer leave_channel_buffer = 129; + UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; + RejoinChannelBuffers rejoin_channel_buffers = 131; + RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; + AckBufferOperation ack_buffer_operation = 133; + + JoinChannelChat join_channel_chat = 134; + JoinChannelChatResponse join_channel_chat_response = 135; + LeaveChannelChat leave_channel_chat = 136; + SendChannelMessage send_channel_message = 137; + SendChannelMessageResponse send_channel_message_response = 138; + ChannelMessageSent channel_message_sent = 139; + GetChannelMessages get_channel_messages = 140; + GetChannelMessagesResponse get_channel_messages_response = 141; + RemoveChannelMessage remove_channel_message = 142; + AckChannelMessage ack_channel_message = 143; + GetChannelMessagesById get_channel_messages_by_id = 144; + + MoveChannel move_channel = 147; + ReorderChannel reorder_channel = 349; + SetChannelVisibility set_channel_visibility = 148; + + AddNotification add_notification = 149; + GetNotifications get_notifications = 150; + GetNotificationsResponse get_notifications_response = 151; + DeleteNotification delete_notification = 152; + MarkNotificationRead mark_notification_read = 153; + LspExtExpandMacro lsp_ext_expand_macro = 154; + LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155; + SetRoomParticipantRole set_room_participant_role = 156; + + UpdateUserChannels update_user_channels = 157; + + GetImplementation get_implementation = 162; + GetImplementationResponse get_implementation_response = 163; + + UpdateChannelMessage update_channel_message = 170; + ChannelMessageUpdate channel_message_update = 171; 
+ + BlameBuffer blame_buffer = 172; + BlameBufferResponse blame_buffer_response = 173; + + UpdateNotification update_notification = 174; + + RestartLanguageServers restart_language_servers = 208; + + RejoinRemoteProjects rejoin_remote_projects = 186; + RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; - OpenNewBuffer open_new_buffer = 196; + OpenNewBuffer open_new_buffer = 196; - TaskContextForLocation task_context_for_location = 203; - TaskContext task_context = 204; + TaskContextForLocation task_context_for_location = 203; + TaskContext task_context = 204; - LinkedEditingRange linked_editing_range = 209; - LinkedEditingRangeResponse linked_editing_range_response = 210; + LinkedEditingRange linked_editing_range = 209; + LinkedEditingRangeResponse linked_editing_range_response = 210; - AdvertiseContexts advertise_contexts = 211; - OpenContext open_context = 212; - OpenContextResponse open_context_response = 213; - CreateContext create_context = 232; - CreateContextResponse create_context_response = 233; - UpdateContext update_context = 214; - SynchronizeContexts synchronize_contexts = 215; - SynchronizeContextsResponse synchronize_contexts_response = 216; + AdvertiseContexts advertise_contexts = 211; + OpenContext open_context = 212; + OpenContextResponse open_context_response = 213; + CreateContext create_context = 232; + CreateContextResponse create_context_response = 233; + UpdateContext update_context = 214; + SynchronizeContexts synchronize_contexts = 215; + SynchronizeContextsResponse synchronize_contexts_response = 216; - GetSignatureHelp get_signature_help = 217; - GetSignatureHelpResponse get_signature_help_response = 218; + GetSignatureHelp get_signature_help = 217; + GetSignatureHelpResponse get_signature_help_response = 218; - ListRemoteDirectory list_remote_directory = 219; - ListRemoteDirectoryResponse list_remote_directory_response = 220; - AddWorktree add_worktree = 222; - AddWorktreeResponse add_worktree_response = 223; + 
ListRemoteDirectory list_remote_directory = 219; + ListRemoteDirectoryResponse list_remote_directory_response = 220; + AddWorktree add_worktree = 222; + AddWorktreeResponse add_worktree_response = 223; - LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; - LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; + LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; + LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; - FindSearchCandidates find_search_candidates = 243; + FindSearchCandidates find_search_candidates = 243; - CloseBuffer close_buffer = 245; + CloseBuffer close_buffer = 245; - ShutdownRemoteServer shutdown_remote_server = 257; + ShutdownRemoteServer shutdown_remote_server = 257; - RemoveWorktree remove_worktree = 258; + RemoveWorktree remove_worktree = 258; - LanguageServerLog language_server_log = 260; + LanguageServerLog language_server_log = 260; - Toast toast = 261; - HideToast hide_toast = 262; + Toast toast = 261; + HideToast hide_toast = 262; - OpenServerSettings open_server_settings = 263; + OpenServerSettings open_server_settings = 263; - GetPermalinkToLine get_permalink_to_line = 264; - GetPermalinkToLineResponse get_permalink_to_line_response = 265; + GetPermalinkToLine get_permalink_to_line = 264; + GetPermalinkToLineResponse get_permalink_to_line_response = 265; - FlushBufferedMessages flush_buffered_messages = 267; + FlushBufferedMessages flush_buffered_messages = 267; - LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; + LanguageServerPromptRequest language_server_prompt_request = 268; + LanguageServerPromptResponse language_server_prompt_response = 269; - GitBranchesResponse git_branches_response = 271; + GitBranchesResponse git_branches_response = 271; - UpdateGitBranch update_git_branch = 272; + UpdateGitBranch update_git_branch = 272; - ListToolchains list_toolchains = 273; - 
ListToolchainsResponse list_toolchains_response = 274; - ActivateToolchain activate_toolchain = 275; - ActiveToolchain active_toolchain = 276; - ActiveToolchainResponse active_toolchain_response = 277; + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; - GetPathMetadata get_path_metadata = 278; - GetPathMetadataResponse get_path_metadata_response = 279; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; - CancelLanguageServerWork cancel_language_server_work = 282; + CancelLanguageServerWork cancel_language_server_work = 282; - LspExtOpenDocs lsp_ext_open_docs = 283; - LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; + LspExtOpenDocs lsp_ext_open_docs = 283; + LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; - SyncExtensions sync_extensions = 285; - SyncExtensionsResponse sync_extensions_response = 286; - InstallExtension install_extension = 287; + SyncExtensions sync_extensions = 285; + SyncExtensionsResponse sync_extensions_response = 286; + InstallExtension install_extension = 287; - OpenUnstagedDiff open_unstaged_diff = 288; - OpenUnstagedDiffResponse open_unstaged_diff_response = 289; + OpenUnstagedDiff open_unstaged_diff = 288; + OpenUnstagedDiffResponse open_unstaged_diff_response = 289; - RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; + RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; - Stage stage = 293; - Unstage unstage = 294; - Commit commit = 295; - OpenCommitMessageBuffer open_commit_message_buffer = 296; + Stage stage = 293; + Unstage unstage = 294; + Commit commit = 295; + OpenCommitMessageBuffer open_commit_message_buffer = 296; - OpenUncommittedDiff open_uncommitted_diff = 297; - OpenUncommittedDiffResponse 
open_uncommitted_diff_response = 298; + OpenUncommittedDiff open_uncommitted_diff = 297; + OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; - SetIndexText set_index_text = 299; + SetIndexText set_index_text = 299; - GitShow git_show = 300; - GitReset git_reset = 301; - GitCommitDetails git_commit_details = 302; - GitCheckoutFiles git_checkout_files = 303; + GitShow git_show = 300; + GitReset git_reset = 301; + GitCommitDetails git_commit_details = 302; + GitCheckoutFiles git_checkout_files = 303; - Push push = 304; - Fetch fetch = 305; - GetRemotes get_remotes = 306; - GetRemotesResponse get_remotes_response = 307; - Pull pull = 308; + Push push = 304; + Fetch fetch = 305; + GetRemotes get_remotes = 306; + GetRemotesResponse get_remotes_response = 307; + Pull pull = 308; - ApplyCodeActionKind apply_code_action_kind = 309; - ApplyCodeActionKindResponse apply_code_action_kind_response = 310; + ApplyCodeActionKind apply_code_action_kind = 309; + ApplyCodeActionKindResponse apply_code_action_kind_response = 310; - RemoteMessageResponse remote_message_response = 311; + RemoteMessageResponse remote_message_response = 311; - GitGetBranches git_get_branches = 312; - GitCreateBranch git_create_branch = 313; - GitChangeBranch git_change_branch = 314; + GitGetBranches git_get_branches = 312; + GitCreateBranch git_create_branch = 313; + GitChangeBranch git_change_branch = 314; - CheckForPushedCommits check_for_pushed_commits = 315; - CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; + CheckForPushedCommits check_for_pushed_commits = 315; + CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; - AskPassRequest ask_pass_request = 317; - AskPassResponse ask_pass_response = 318; + AskPassRequest ask_pass_request = 317; + AskPassResponse ask_pass_response = 318; - GitDiff git_diff = 319; - GitDiffResponse git_diff_response = 320; - GitInit git_init = 321; + GitDiff git_diff = 319; + GitDiffResponse git_diff_response = 320; + 
GitInit git_init = 321; - CodeLens code_lens = 322; - GetCodeLens get_code_lens = 323; - GetCodeLensResponse get_code_lens_response = 324; - RefreshCodeLens refresh_code_lens = 325; + CodeLens code_lens = 322; + GetCodeLens get_code_lens = 323; + GetCodeLensResponse get_code_lens_response = 324; + RefreshCodeLens refresh_code_lens = 325; - ToggleBreakpoint toggle_breakpoint = 326; - BreakpointsForFile breakpoints_for_file = 327; + ToggleBreakpoint toggle_breakpoint = 326; + BreakpointsForFile breakpoints_for_file = 327; - UpdateRepository update_repository = 328; - RemoveRepository remove_repository = 329; + UpdateRepository update_repository = 328; + RemoveRepository remove_repository = 329; - GetDocumentSymbols get_document_symbols = 330; - GetDocumentSymbolsResponse get_document_symbols_response = 331; + GetDocumentSymbols get_document_symbols = 330; + GetDocumentSymbolsResponse get_document_symbols_response = 331; - LoadCommitDiff load_commit_diff = 334; - LoadCommitDiffResponse load_commit_diff_response = 335; + LoadCommitDiff load_commit_diff = 334; + LoadCommitDiffResponse load_commit_diff_response = 335; - StopLanguageServers stop_language_servers = 336; + StopLanguageServers stop_language_servers = 336; - LspExtRunnables lsp_ext_runnables = 337; - LspExtRunnablesResponse lsp_ext_runnables_response = 338; + LspExtRunnables lsp_ext_runnables = 337; + LspExtRunnablesResponse lsp_ext_runnables_response = 338; - GetDebugAdapterBinary get_debug_adapter_binary = 339; - DebugAdapterBinary debug_adapter_binary = 340; - RunDebugLocators run_debug_locators = 341; - DebugRequest debug_request = 342; + GetDebugAdapterBinary get_debug_adapter_binary = 339; + DebugAdapterBinary debug_adapter_binary = 340; + RunDebugLocators run_debug_locators = 341; + DebugRequest debug_request = 342; - LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; - LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344; - LspExtCancelFlycheck lsp_ext_cancel_flycheck = 
345; - LspExtRunFlycheck lsp_ext_run_flycheck = 346; - LspExtClearFlycheck lsp_ext_clear_flycheck = 347; + LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; + LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344; + LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345; + LspExtRunFlycheck lsp_ext_run_flycheck = 346; + LspExtClearFlycheck lsp_ext_clear_flycheck = 347; - LogToDebugConsole log_to_debug_console = 348; + LogToDebugConsole log_to_debug_console = 348; - GetDocumentDiagnostics get_document_diagnostics = 350; - GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; - PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; + GetDocumentDiagnostics get_document_diagnostics = 350; + GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; + PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; - GetDocumentColor get_document_color = 353; - GetDocumentColorResponse get_document_color_response = 354; - GetColorPresentation get_color_presentation = 355; - GetColorPresentationResponse get_color_presentation_response = 356; + GetDocumentColor get_document_color = 353; + GetDocumentColorResponse get_document_color_response = 354; + GetColorPresentation get_color_presentation = 355; + GetColorPresentationResponse get_color_presentation_response = 356; - Stash stash = 357; - StashPop stash_pop = 358; + Stash stash = 357; + StashPop stash_pop = 358; - GetDefaultBranch get_default_branch = 359; - GetDefaultBranchResponse get_default_branch_response = 360; + GetDefaultBranch get_default_branch = 359; + GetDefaultBranchResponse get_default_branch_response = 360; - GetCrashFiles get_crash_files = 361; - GetCrashFilesResponse get_crash_files_response = 362; + GetCrashFiles get_crash_files = 361; + GetCrashFilesResponse get_crash_files_response = 362; - GitClone git_clone = 363; - GitCloneResponse git_clone_response = 364; + GitClone git_clone = 363; + GitCloneResponse git_clone_response = 364; - LspQuery 
lsp_query = 365; - LspQueryResponse lsp_query_response = 366; - ToggleLspLogs toggle_lsp_logs = 367; + LspQuery lsp_query = 365; + LspQueryResponse lsp_query_response = 366; + ToggleLspLogs toggle_lsp_logs = 367; - UpdateUserSettings update_user_settings = 368; + UpdateUserSettings update_user_settings = 368; - GetProcesses get_processes = 369; - GetProcessesResponse get_processes_response = 370; + GetProcesses get_processes = 369; + GetProcessesResponse get_processes_response = 370; - ResolveToolchain resolve_toolchain = 371; - ResolveToolchainResponse resolve_toolchain_response = 372; + ResolveToolchain resolve_toolchain = 371; + ResolveToolchainResponse resolve_toolchain_response = 372; - GetAgentServerCommand get_agent_server_command = 373; - AgentServerCommand agent_server_command = 374; + GetAgentServerCommand get_agent_server_command = 373; + AgentServerCommand agent_server_command = 374; - ExternalAgentsUpdated external_agents_updated = 375; - ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; - NewExternalAgentVersionAvailable new_external_agent_version_available = 377; + ExternalAgentsUpdated external_agents_updated = 375; + ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; + NewExternalAgentVersionAvailable new_external_agent_version_available = 377; - StashDrop stash_drop = 378; - StashApply stash_apply = 379; + StashDrop stash_drop = 378; + StashApply stash_apply = 379; - GitRenameBranch git_rename_branch = 380; + GitRenameBranch git_rename_branch = 380; - RemoteStarted remote_started = 381; + RemoteStarted remote_started = 381; - GetDirectoryEnvironment get_directory_environment = 382; - DirectoryEnvironment directory_environment = 383; + GetDirectoryEnvironment get_directory_environment = 382; + DirectoryEnvironment directory_environment = 383; - GetTreeDiff get_tree_diff = 384; - GetTreeDiffResponse get_tree_diff_response = 385; + GetTreeDiff get_tree_diff = 384; + GetTreeDiffResponse 
get_tree_diff_response = 385; - GetBlobContent get_blob_content = 386; - GetBlobContentResponse get_blob_content_response = 387; + GetBlobContent get_blob_content = 386; + GetBlobContentResponse get_blob_content_response = 387; - GitWorktreesResponse git_worktrees_response = 388; - GitGetWorktrees git_get_worktrees = 389; - GitCreateWorktree git_create_worktree = 390; + GitWorktreesResponse git_worktrees_response = 388; + GitGetWorktrees git_get_worktrees = 389; + GitCreateWorktree git_create_worktree = 390; - OpenImageByPath open_image_by_path = 391; - OpenImageResponse open_image_response = 392; - CreateImageForPeer create_image_for_peer = 393; + OpenImageByPath open_image_by_path = 391; + OpenImageResponse open_image_response = 392; + CreateImageForPeer create_image_for_peer = 393; + GitFileHistory git_file_history = 397; + GitFileHistoryResponse git_file_history_response = 398; - GitFileHistory git_file_history = 397; - GitFileHistoryResponse git_file_history_response = 398; + RunGitHook run_git_hook = 399; - RunGitHook run_git_hook = 399; + GitDeleteBranch git_delete_branch = 400; - GitDeleteBranch git_delete_branch = 400; + ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; - ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; + GitCreateRemote git_create_remote = 402; + GitRemoveRemote git_remove_remote = 403; - GitCreateRemote git_create_remote = 402; - GitRemoveRemote git_remove_remote = 403; + TrustWorktrees trust_worktrees = 404; + RestrictWorktrees restrict_worktrees = 405; - TrustWorktrees trust_worktrees = 404; - RestrictWorktrees restrict_worktrees = 405; + ShareAgentThread share_agent_thread = 406; + GetSharedAgentThread get_shared_agent_thread = 407; + GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; - ShareAgentThread share_agent_thread = 406; - GetSharedAgentThread get_shared_agent_thread = 407; - GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; + 
FindSearchCandidatesChunk find_search_candidates_chunk = 409; + FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; + GetContextServerCommand get_context_server_command = 411; + ContextServerCommand context_server_command = 412; - FindSearchCandidatesChunk find_search_candidates_chunk = 409; - FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; - GetContextServerCommand get_context_server_command = 411; - ContextServerCommand context_server_command = 412; + AllocateWorktreeId allocate_worktree_id = 413; + AllocateWorktreeIdResponse allocate_worktree_id_response = 414; - AllocateWorktreeId allocate_worktree_id = 413; - AllocateWorktreeIdResponse allocate_worktree_id_response = 414; + DownloadFileByPath download_file_by_path = 415; + DownloadFileResponse download_file_response = 416; + CreateFileForPeer create_file_for_peer = 417; - DownloadFileByPath download_file_by_path = 415; - DownloadFileResponse download_file_response = 416; - CreateFileForPeer create_file_for_peer = 417; + SemanticTokens semantic_tokens = 418; + SemanticTokensResponse semantic_tokens_response = 419; + RefreshSemanticTokens refresh_semantic_tokens = 420; + GetFoldingRanges get_folding_ranges = 421; + GetFoldingRangesResponse get_folding_ranges_response = 422; - SemanticTokens semantic_tokens = 418; - SemanticTokensResponse semantic_tokens_response = 419; - RefreshSemanticTokens refresh_semantic_tokens = 420; - GetFoldingRanges get_folding_ranges = 421; - GetFoldingRangesResponse get_folding_ranges_response = 422; + GetRemoteProfilingData get_remote_profiling_data = 423; + GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; - GetRemoteProfilingData get_remote_profiling_data = 423; - GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; - - SpawnKernel spawn_kernel = 426; - SpawnKernelResponse spawn_kernel_response = 427; - KillKernel kill_kernel = 428; // current max - } + SpawnKernel spawn_kernel = 426; + 
SpawnKernelResponse spawn_kernel_response = 427; + KillKernel kill_kernel = 428; // current max + } - reserved 87 to 88; - reserved 90; - reserved 102 to 103; - reserved 158 to 161; - reserved 164; - reserved 166 to 169; - reserved 175 to 185; - reserved 188 to 195; - reserved 197; - reserved 198 to 202; - reserved 205 to 206; - reserved 221; - reserved 224 to 231; - reserved 234 to 236; - reserved 239 to 240; - reserved 244; - reserved 246 to 256; - reserved 259; - reserved 270; - reserved 280 to 281; - reserved 332 to 333; - reserved 394 to 396; + reserved 87 to 88; + reserved 90; + reserved 102 to 103; + reserved 158 to 161; + reserved 164; + reserved 166 to 169; + reserved 175 to 185; + reserved 188 to 195; + reserved 197; + reserved 198 to 202; + reserved 205 to 206; + reserved 221; + reserved 224 to 231; + reserved 234 to 236; + reserved 239 to 240; + reserved 244; + reserved 246 to 256; + reserved 259; + reserved 270; + reserved 280 to 281; + reserved 332 to 333; + reserved 394 to 396; + reserved 429 to 430; } message Hello { - PeerId peer_id = 1; + PeerId peer_id = 1; } message Ping {} @@ -512,37 +511,37 @@ message Ping {} message Ack {} message Error { - string message = 1; - ErrorCode code = 2; - repeated string tags = 3; + string message = 1; + ErrorCode code = 2; + repeated string tags = 3; } enum ErrorCode { - Internal = 0; - NoSuchChannel = 1; - Disconnected = 2; - SignedOut = 3; - UpgradeRequired = 4; - Forbidden = 5; - NeedsCla = 7; - NotARootChannel = 8; - BadPublicNesting = 9; - CircularNesting = 10; - WrongMoveTarget = 11; - UnsharedItem = 12; - NoSuchProject = 13; - DevServerProjectPathDoesNotExist = 16; - RemoteUpgradeRequired = 17; - RateLimitExceeded = 18; - CommitFailed = 19; - reserved 6; - reserved 14 to 15; + Internal = 0; + NoSuchChannel = 1; + Disconnected = 2; + SignedOut = 3; + UpgradeRequired = 4; + Forbidden = 5; + NeedsCla = 7; + NotARootChannel = 8; + BadPublicNesting = 9; + CircularNesting = 10; + WrongMoveTarget = 11; + 
UnsharedItem = 12; + NoSuchProject = 13; + DevServerProjectPathDoesNotExist = 16; + RemoteUpgradeRequired = 17; + RateLimitExceeded = 18; + CommitFailed = 19; + reserved 6; + reserved 14 to 15; } message EndStream {} message Test { - uint64 id = 1; + uint64 id = 1; } message FlushBufferedMessages {} @@ -552,19 +551,19 @@ message FlushBufferedMessagesResponse {} message RemoteStarted {} message SpawnKernel { - string kernel_name = 1; - string working_directory = 2; - uint64 project_id = 3; - string command = 4; - repeated string args = 5; + string kernel_name = 1; + string working_directory = 2; + uint64 project_id = 3; + string command = 4; + repeated string args = 5; } message SpawnKernelResponse { - string kernel_id = 1; - string connection_file = 2; + string kernel_id = 1; + string connection_file = 2; } message KillKernel { - string kernel_id = 1; - uint64 project_id = 2; + string kernel_id = 1; + uint64 project_id = 2; } diff --git a/crates/proto/src/error.rs b/crates/proto/src/error.rs index d83b0fc499ba9dddb1d6417307fea9eaed9fdfd7..f551e8c3fc4d7023f5d9d43c3dc6eb51ffe2bb46 100644 --- a/crates/proto/src/error.rs +++ b/crates/proto/src/error.rs @@ -159,6 +159,12 @@ pub struct RpcError { /// in the app; however it is useful for chaining .message() and .with_tag() on /// ErrorCode. impl RpcError { + /// Returns the raw server-provided error message without any RPC framing + /// (e.g. without the "RPC request X failed: " prefix that `Display` adds). + pub fn raw_message(&self) -> &str { + &self.msg + } + /// from_proto converts a crate::Error into an anyhow::Error containing /// an RpcError. 
pub fn from_proto(error: &crate::Error, request: &str) -> anyhow::Error { diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 110a702437d463d6f296510c8f4a3a68d28d7d60..548e08eccb49c19551984e6acdd086d78927d614 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -750,12 +750,7 @@ impl PickerDelegate for RecentProjectsDelegate { self.selected_index = ix; } - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { matches!( self.filtered_entries.get(ix), Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::RecentProject(_)) @@ -1258,17 +1253,16 @@ impl PickerDelegate for RecentProjectsDelegate { .gap_1() .border_t_1() .border_color(cx.theme().colors().border_variant) - .child( + .child({ + let open_action = workspace::Open { + create_new_window: self.create_new_window, + }; Button::new("open_local_folder", "Open Local Project") - .key_binding(KeyBinding::for_action_in( - &workspace::Open, - &focus_handle, - cx, - )) - .on_click(|_, window, cx| { - window.dispatch_action(workspace::Open.boxed_clone(), cx) - }), - ) + .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) + .on_click(move |_, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx) + }) + }) .child( Button::new("open_remote_folder", "Open Remote Project") .key_binding(KeyBinding::for_action( @@ -1359,6 +1353,7 @@ impl PickerDelegate for RecentProjectsDelegate { ) .menu({ let focus_handle = focus_handle.clone(); + let create_new_window = self.create_new_window; move |window, cx| { Some(ContextMenu::build(window, cx, { @@ -1367,7 +1362,7 @@ impl PickerDelegate for RecentProjectsDelegate { menu.context(focus_handle) .action( "Open Local Project", - workspace::Open.boxed_clone(), + workspace::Open { create_new_window 
}.boxed_clone(), ) .action( "Open Remote Project", diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 8bddcf37270e56932e75635fcd35616d12309b6e..a94f7b1d57eaef8657fb0d448480f84c97ce7e70 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1161,12 +1161,11 @@ impl RemoteServerProjects { workspace.toggle_modal(window, cx, |window, cx| { RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx) }); - let prompt = workspace - .active_modal::(cx) - .unwrap() - .read(cx) - .prompt - .clone(); + // can be None if another copy of this modal opened in the meantime + let Some(modal) = workspace.active_modal::(cx) else { + return; + }; + let prompt = modal.read(cx).prompt.clone(); let connect = connect( ConnectionIdentifier::setup(), @@ -1849,6 +1848,7 @@ impl RemoteServerProjects { ) { let replace_window = window.window_handle().downcast::(); + let app_state = Arc::downgrade(&app_state); cx.spawn_in(window, async move |entity, cx| { let (connection, starting_dir) = match start_dev_container_with_config(context, config).await { @@ -1882,6 +1882,9 @@ impl RemoteServerProjects { }) .log_err(); + let Some(app_state) = app_state.upgrade() else { + return; + }; let result = open_remote_project( connection.into(), vec![starting_dir].into_iter().map(PathBuf::from).collect(), diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 50026904a8f1ae9bf1954b8c41383487f59a001b..c08561954ebc0ba47a7bf1ab58092275161679a0 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -48,3 +48,4 @@ which.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } diff --git a/crates/remote/src/transport.rs b/crates/remote/src/transport.rs index 
09bb22ddbe2b303b767255fd7ab02b54d9b17b2f..8d0f212cfc4f9544d0a827a41aefc3a8af07ee72 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -1,3 +1,5 @@ +use std::io::Write; + use crate::{ RemoteArch, RemoteOs, RemotePlatform, json_log::LogRecord, @@ -137,7 +139,12 @@ fn handle_rpc_messages_over_child_process_stdio( if let Ok(record) = serde_json::from_slice::(content) { record.log(log::logger()) } else { - eprintln!("(remote) {}", String::from_utf8_lossy(content)); + std::io::stderr() + .write_fmt(format_args!( + "(remote) {}\n", + String::from_utf8_lossy(content) + )) + .ok(); } } stderr_buffer.drain(0..start_ix); diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 1bcf80880ab17ddea63bd56fb54acfddc48db2dd..74076b58e35bd1ea7759927bad255925e7f7d9b9 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -635,7 +635,7 @@ impl RemoteConnection for DockerExecConnection { for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { if let Some(value) = std::env::var(env_var).ok() { docker_args.push("-e".to_string()); - docker_args.push(format!("{}='{}'", env_var, value)); + docker_args.push(format!("{env_var}={value}")); } } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 83733306e7a1c96209f12158263f719c22abf54c..42cfc8f86dc34712e6b2cd0e4b5d8f379e443834 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -94,6 +94,14 @@ impl Default for SshConnectionHost { } } +fn bracket_ipv6(host: &str) -> String { + if host.contains(':') && !host.starts_with('[') { + format!("[{}]", host) + } else { + host.to_string() + } +} + #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] pub struct SshConnectionOptions { pub host: SshConnectionHost, @@ -344,7 +352,12 @@ impl RemoteConnection for SshRemoteConnection { args.push("-N".into()); for (local_port, host, remote_port) in 
forwards { args.push("-L".into()); - args.push(format!("{local_port}:{host}:{remote_port}")); + args.push(format!( + "{}:{}:{}", + local_port, + bracket_ipv6(&host), + remote_port + )); } args.push(socket.connection_options.ssh_destination()); Ok(CommandTemplate { @@ -450,7 +463,7 @@ impl RemoteConnection for SshRemoteConnection { let mut proxy_args = vec![]; for env_var in VARS { if let Some(value) = std::env::var(env_var).ok() { - proxy_args.push(format!("{}='{}'", env_var, value)); + proxy_args.push(format!("{env_var}={value}")); } } proxy_args.push(remote_binary_path.display(self.path_style()).into_owned()); @@ -1342,33 +1355,71 @@ fn parse_port_number(port_str: &str) -> Result { .with_context(|| format!("parsing port number: {port_str}")) } +fn split_port_forward_tokens(spec: &str) -> Result> { + let mut tokens = Vec::new(); + let mut chars = spec.chars().peekable(); + + while chars.peek().is_some() { + if chars.peek() == Some(&'[') { + chars.next(); + let mut bracket_content = String::new(); + loop { + match chars.next() { + Some(']') => break, + Some(ch) => bracket_content.push(ch), + None => anyhow::bail!("Unmatched '[' in port forward spec: {spec}"), + } + } + tokens.push(bracket_content); + if chars.peek() == Some(&':') { + chars.next(); + } + } else { + let mut token = String::new(); + for ch in chars.by_ref() { + if ch == ':' { + break; + } + token.push(ch); + } + tokens.push(token); + } + } + + Ok(tokens) +} + fn parse_port_forward_spec(spec: &str) -> Result { - let parts: Vec<&str> = spec.split(':').collect(); + let tokens = if spec.contains('[') { + split_port_forward_tokens(spec)? 
+ } else { + spec.split(':').map(String::from).collect() + }; - match *parts { - [a, b, c, d] => { - let local_port = parse_port_number(b)?; - let remote_port = parse_port_number(d)?; + match tokens.len() { + 4 => { + let local_port = parse_port_number(&tokens[1])?; + let remote_port = parse_port_number(&tokens[3])?; Ok(SshPortForwardOption { - local_host: Some(a.to_string()), + local_host: Some(tokens[0].clone()), local_port, - remote_host: Some(c.to_string()), + remote_host: Some(tokens[2].clone()), remote_port, }) } - [a, b, c] => { - let local_port = parse_port_number(a)?; - let remote_port = parse_port_number(c)?; + 3 => { + let local_port = parse_port_number(&tokens[0])?; + let remote_port = parse_port_number(&tokens[2])?; Ok(SshPortForwardOption { local_host: None, local_port, - remote_host: Some(b.to_string()), + remote_host: Some(tokens[1].clone()), remote_port, }) } - _ => anyhow::bail!("Invalid port forward format"), + _ => anyhow::bail!("Invalid port forward format: {spec}"), } } @@ -1534,7 +1585,10 @@ impl SshConnectionOptions { format!( "-L{}:{}:{}:{}", - local_host, pf.local_port, remote_host, pf.remote_port + bracket_ipv6(local_host), + pf.local_port, + bracket_ipv6(remote_host), + pf.remote_port ) })); } @@ -1612,12 +1666,11 @@ fn build_command_posix( write!(exec, "exec env ")?; for (k, v) in input_env.iter() { - write!( - exec, - "{}={} ", - k, - ssh_shell_kind.try_quote(v).context("shell quoting")? - )?; + let assignment = format!("{k}={v}"); + let assignment = ssh_shell_kind + .try_quote(&assignment) + .context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(input_program) = input_program { @@ -1641,7 +1694,12 @@ fn build_command_posix( if let Some((local_port, host, remote_port)) = port_forward { args.push("-L".into()); - args.push(format!("{local_port}:{host}:{remote_port}")); + args.push(format!( + "{}:{}:{}", + local_port, + bracket_ipv6(&host), + remote_port + )); } // -q suppresses the "Connection to ... closed." 
message that SSH prints when @@ -1731,7 +1789,12 @@ fn build_command_windows( if let Some((local_port, host, remote_port)) = port_forward { args.push("-L".into()); - args.push(format!("{local_port}:{host}:{remote_port}")); + args.push(format!( + "{}:{}:{}", + local_port, + bracket_ipv6(&host), + remote_port + )); } // -q suppresses the "Connection to ... closed." message that SSH prints when @@ -1818,7 +1881,7 @@ mod tests { "-q", "-t", "user@host", - "cd \"$HOME/work\" && exec env INPUT_VA=val remote_program arg1 arg2" + "cd \"$HOME/work\" && exec env 'INPUT_VA=val' remote_program arg1 arg2" ] ); assert_eq!(command.env, env); @@ -1854,7 +1917,7 @@ mod tests { "-q", "-t", "user@host", - "cd && exec env INPUT_VA=val /bin/fish -l" + "cd && exec env 'INPUT_VA=val' /bin/fish -l" ] ); assert_eq!(command.env, env); @@ -1862,6 +1925,38 @@ mod tests { Ok(()) } + #[test] + fn test_build_command_quotes_env_assignment() -> Result<()> { + let mut input_env = HashMap::default(); + input_env.insert("ZED$(echo foo)".to_string(), "value".to_string()); + + let command = build_command_posix( + Some("remote_program".to_string()), + &[], + &input_env, + None, + None, + HashMap::default(), + PathStyle::Posix, + "/bin/bash", + ShellKind::Posix, + vec![], + "user@host", + Interactive::No, + )?; + + let remote_command = command + .args + .last() + .context("missing remote command argument")?; + assert!( + remote_command.contains("exec env 'ZED$(echo foo)=value' remote_program"), + "expected env assignment to be quoted, got: {remote_command}" + ); + + Ok(()) + } + #[test] fn scp_args_exclude_port_forward_flags() { let options = SshConnectionOptions { @@ -1938,4 +2033,79 @@ mod tests { Ok(()) } + + #[test] + fn test_parse_port_forward_spec_ipv6() -> Result<()> { + let pf = parse_port_forward_spec("[::1]:8080:[::1]:80")?; + assert_eq!(pf.local_host, Some("::1".to_string())); + assert_eq!(pf.local_port, 8080); + assert_eq!(pf.remote_host, Some("::1".to_string())); + assert_eq!(pf.remote_port, 
80); + + let pf = parse_port_forward_spec("8080:[::1]:80")?; + assert_eq!(pf.local_host, None); + assert_eq!(pf.local_port, 8080); + assert_eq!(pf.remote_host, Some("::1".to_string())); + assert_eq!(pf.remote_port, 80); + + let pf = parse_port_forward_spec("[2001:db8::1]:3000:[fe80::1]:4000")?; + assert_eq!(pf.local_host, Some("2001:db8::1".to_string())); + assert_eq!(pf.local_port, 3000); + assert_eq!(pf.remote_host, Some("fe80::1".to_string())); + assert_eq!(pf.remote_port, 4000); + + let pf = parse_port_forward_spec("127.0.0.1:8080:localhost:80")?; + assert_eq!(pf.local_host, Some("127.0.0.1".to_string())); + assert_eq!(pf.local_port, 8080); + assert_eq!(pf.remote_host, Some("localhost".to_string())); + assert_eq!(pf.remote_port, 80); + + Ok(()) + } + + #[test] + fn test_port_forward_ipv6_formatting() { + let options = SshConnectionOptions { + host: "example.com".into(), + port_forwards: Some(vec![SshPortForwardOption { + local_host: Some("::1".to_string()), + local_port: 8080, + remote_host: Some("::1".to_string()), + remote_port: 80, + }]), + ..Default::default() + }; + + let args = options.additional_args(); + assert!( + args.iter().any(|arg| arg == "-L[::1]:8080:[::1]:80"), + "expected bracketed IPv6 in -L flag: {args:?}" + ); + } + + #[test] + fn test_build_command_with_ipv6_port_forward() -> Result<()> { + let command = build_command_posix( + None, + &[], + &HashMap::default(), + None, + Some((8080, "::1".to_owned(), 80)), + HashMap::default(), + PathStyle::Posix, + "/bin/bash", + ShellKind::Posix, + vec![], + "user@host", + Interactive::No, + )?; + + assert!( + command.args.iter().any(|arg| arg == "8080:[::1]:80"), + "expected bracketed IPv6 in port forward arg: {:?}", + command.args + ); + + Ok(()) + } } diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 2eb2aea59abdbe24a3dae168d4399aaa59a9c6e3..5a37e1c65bfe11221b60499779c57f0ce7dca364 100644 --- a/crates/remote/src/transport/wsl.rs +++ 
b/crates/remote/src/transport/wsl.rs @@ -450,13 +450,10 @@ impl RemoteConnection for WslRemoteConnection { let mut exec = String::from("exec env "); - for (k, v) in env.iter() { - write!( - exec, - "{}={} ", - k, - shell_kind.try_quote(v).context("shell quoting")? - )?; + for (key, value) in env.iter() { + let assignment = format!("{key}={value}"); + let assignment = shell_kind.try_quote(&assignment).context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(program) = program { diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index d1430831eef5feb716eab098593573a17336cbb9..ee729a80eaa9eff56eee7f3bcb8fe6eaf31f0c41 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -82,6 +82,7 @@ minidumper.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true +gpui = { workspace = true, features = ["windows-manifest"] } [dev-dependencies] action_log.workspace = true diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index a744f733e72aef7cb7a1f878d14412c8f9b742e3..7f9953c8a4e746d9586b663330badb38149cfb64 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2,12 +2,11 @@ /// The tests in this file assume that server_cx is running on Windows too. /// We neead to find a way to test Windows-Non-Windows interactions. 
use crate::headless_project::HeadlessProject; -use agent::{AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream}; +use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream, ToolInput}; use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; -use language_model::{LanguageModelToolResultContent, fake_provider::FakeLanguageModel}; -use prompt_store::ProjectContext; +use language_model::LanguageModelToolResultContent; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; @@ -1939,30 +1938,19 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu let action_log = cx.new(|_| action_log::ActionLog::new(project.clone())); - // Create a minimal thread for the ReadFileTool - let context_server_registry = - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let input = ReadFileToolInput { path: "project/b.txt".into(), start_line: None, end_line: None, }; - let read_tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let read_tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); - let exists_result = cx.update(|cx| read_tool.clone().run(input, event_stream.clone(), cx)); + let exists_result = cx.update(|cx| { + read_tool + .clone() + .run(ToolInput::resolved(input), event_stream.clone(), cx) + }); let output = exists_result.await.unwrap(); assert_eq!(output, LanguageModelToolResultContent::Text("B".into())); @@ -1971,7 +1959,8 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu start_line: None, end_line: None, }; - let does_not_exist_result = cx.update(|cx| 
read_tool.run(input, event_stream, cx)); + let does_not_exist_result = + cx.update(|cx| read_tool.run(ToolInput::resolved(input), event_stream, cx)); does_not_exist_result.await.unwrap_err(); } @@ -1998,7 +1987,7 @@ async fn test_remote_external_agent_server( .map(|name| name.to_string()) .collect::>() }); - pretty_assertions::assert_eq!(names, ["codex", "gemini", "claude"]); + pretty_assertions::assert_eq!(names, Vec::::new()); server_cx.update_global::(|settings_store, cx| { settings_store .set_server_settings( @@ -2029,15 +2018,14 @@ async fn test_remote_external_agent_server( .map(|name| name.to_string()) .collect::>() }); - pretty_assertions::assert_eq!(names, ["gemini", "codex", "claude", "foo"]); - let (command, root, login) = project + pretty_assertions::assert_eq!(names, ["foo"]); + let command = project .update(cx, |project, cx| { project.agent_server_store().update(cx, |store, cx| { store .get_external_agent(&"foo".into()) .unwrap() .get_command( - None, HashMap::from_iter([("OTHER_VAR".into(), "other-val".into())]), None, None, @@ -2053,13 +2041,12 @@ async fn test_remote_external_agent_server( path: "mock".into(), args: vec!["foo-cli".into(), "--flag".into()], env: Some(HashMap::from_iter([ + ("NO_BROWSER".into(), "1".into()), ("VAR".into(), "val".into()), ("OTHER_VAR".into(), "other-val".into()) ])) } ); - assert_eq!(&PathBuf::from(root), paths::home_dir()); - assert!(login.is_none()); } pub async fn init_test( diff --git a/crates/remote_server/src/server.rs b/crates/remote_server/src/server.rs index 6784f5fc1d221989aeaf1ecbd34da65f8f923a87..bc39e4635e96110f5e9179ba744afc6f93f8e341 100644 --- a/crates/remote_server/src/server.rs +++ b/crates/remote_server/src/server.rs @@ -356,9 +356,18 @@ fn start_server( let (mut stdin_msg_tx, mut stdin_msg_rx) = mpsc::unbounded::(); cx.background_spawn(async move { - while let Ok(msg) = read_message(&mut stdin_stream, &mut input_buffer).await { - if (stdin_msg_tx.send(msg).await).is_err() { - break; + loop { + 
match read_message(&mut stdin_stream, &mut input_buffer).await { + Ok(msg) => { + if (stdin_msg_tx.send(msg).await).is_err() { + log::info!("stdin message channel closed, stopping stdin reader"); + break; + } + } + Err(error) => { + log::warn!("stdin read failed: {error:?}"); + break; + } } } }).detach(); diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 7bf63657bdea126d7a3f77681e587521356f9eb1..c2d6f745d9272651bd90bcdfdc689263958b8b09 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -47,6 +47,7 @@ runtimelib.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +shlex.workspace = true smol.workspace = true telemetry.workspace = true terminal.workspace = true diff --git a/crates/repl/src/kernels/wsl_kernel.rs b/crates/repl/src/kernels/wsl_kernel.rs index 1cdb774008d6a40e57b0abeeec73e294896c221a..34340c74feeb76cc4822a6ca5d669693cc448334 100644 --- a/crates/repl/src/kernels/wsl_kernel.rs +++ b/crates/repl/src/kernels/wsl_kernel.rs @@ -21,6 +21,7 @@ use std::{ path::PathBuf, sync::Arc, }; + use uuid::Uuid; // Find a set of open ports. This creates a listener with port set to 0. The listener will be closed at the end when it goes out of scope. @@ -56,6 +57,15 @@ impl Debug for WslRunningKernel { } } +fn quote_posix_shell_arguments(arguments: &[String]) -> Result { + let mut quoted_arguments = Vec::with_capacity(arguments.len()); + for argument in arguments { + let quoted = shlex::try_quote(argument).map(|quoted| quoted.into_owned())?; + quoted_arguments.push(quoted); + } + Ok(quoted_arguments.join(" ")) +} + impl WslRunningKernel { pub fn new( kernel_specification: WslKernelSpecification, @@ -129,9 +139,8 @@ impl WslRunningKernel { // `wsl -d --exec ...` // But we need to replace {connection_file} with wsl_connection_path. 
- let argv = kernel_specification.kernelspec.argv; anyhow::ensure!( - !argv.is_empty(), + !kernel_specification.kernelspec.argv.is_empty(), "Empty argv in kernelspec {}", kernel_specification.name ); @@ -182,50 +191,57 @@ impl WslRunningKernel { // We use bash -lc to run in a login shell for proper environment setup let mut kernel_args: Vec = Vec::new(); - if let Some(env) = &kernel_specification.kernelspec.env { - if !env.is_empty() { - kernel_args.push("env".to_string()); - for (k, v) in env { - kernel_args.push(format!("{}={}", k, v)); + let resolved_argv: Vec = kernel_specification + .kernelspec + .argv + .iter() + .map(|arg| { + if arg == "{connection_file}" { + wsl_connection_path.clone() + } else { + arg.clone() } + }) + .collect(); + + let executable = resolved_argv.first().map(String::as_str); + let needs_python_resolution = executable.map_or(false, |executable| { + executable == "python" || executable == "python3" || !executable.starts_with('/') + }); + + let mut env_assignments: Vec = Vec::new(); + if let Some(env) = &kernel_specification.kernelspec.env { + env_assignments.reserve(env.len()); + for (key, value) in env { + let assignment = format!("{key}={value}"); + let assignment = shlex::try_quote(&assignment) + .map(|quoted| quoted.into_owned())?; + env_assignments.push(assignment); } - } - for arg in argv { - if arg == "{connection_file}" { - kernel_args.push(wsl_connection_path.clone()); - } else { - kernel_args.push(arg.clone()); + if !env_assignments.is_empty() { + kernel_args.push("env".to_string()); + kernel_args.extend(env_assignments.iter().cloned()); } } - // because first command is python/python3 we need make sure it's present in the env - let first_cmd = kernel_args.first().map(|arg| { - arg.split_whitespace().next().unwrap_or(arg) - }); - - let needs_python_resolution = first_cmd.map_or(false, |cmd| { - cmd == "python" || cmd == "python3" || !cmd.starts_with('/') - }); + kernel_args.extend(resolved_argv.iter().cloned()); let shell_command 
= if needs_python_resolution { // 1. Check for .venv/bin/python or .venv/bin/python3 in working directory // 2. Fall back to system python3 or python - let rest_args: Vec = kernel_args.iter().skip(1).cloned().collect(); - let rest_string = rest_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" "); + let rest_args: Vec = resolved_argv.iter().skip(1).cloned().collect(); + let arg_string = quote_posix_shell_arguments(&rest_args)?; + let set_env_command = if env_assignments.is_empty() { + String::new() + } else { + format!("export {}; ", env_assignments.join(" ")) + }; let cd_command = if let Some(wd) = wsl_working_directory.as_ref() { - format!("cd '{}' && ", wd.replace('\'', "'\\''")) + let quoted_wd = shlex::try_quote(wd) + .map(|quoted| quoted.into_owned())?; + format!("cd {quoted_wd} && ") } else { String::new() }; @@ -233,6 +249,7 @@ impl WslRunningKernel { format!( "set -e; \ + {} \ {} \ echo \"Working directory: $(pwd)\" >&2; \ if [ -x .venv/bin/python ]; then \ @@ -254,20 +271,10 @@ impl WslRunningKernel { echo 'PATH:' \"$PATH\" >&2; \ exit 127; \ fi", - cd_command, rest_string, rest_string, rest_string, rest_string + cd_command, set_env_command, arg_string, arg_string, arg_string, arg_string ) } else { - kernel_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" ") + quote_posix_shell_arguments(&kernel_args)? 
}; cmd.arg("bash") diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index 5559458da4a5c7212982fcb25d2496e39d039547..200424742aff113d637fe9aca30999c0f95e79a5 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -1,12 +1,11 @@ -#![allow(unused, dead_code)] use std::sync::Arc; use std::time::{Duration, Instant}; -use editor::{Editor, EditorMode, MultiBuffer}; +use editor::{Editor, EditorMode, MultiBuffer, SizingBehavior}; use futures::future::Shared; use gpui::{ App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, RetainAllImageCache, - StatefulInteractiveElement, Task, TextStyleRefinement, image_cache, prelude::*, + StatefulInteractiveElement, Task, TextStyleRefinement, prelude::*, }; use language::{Buffer, Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; @@ -235,7 +234,7 @@ pub trait RenderableCell: Render { fn source(&self) -> &String; fn selected(&self) -> bool; fn set_selected(&mut self, selected: bool) -> &mut Self; - fn selected_bg_color(&self, window: &mut Window, cx: &mut Context) -> Hsla { + fn selected_bg_color(&self, _window: &mut Window, cx: &mut Context) -> Hsla { if self.selected() { let mut color = cx.theme().colors().element_hover; color.fade_out(0.5); @@ -252,7 +251,7 @@ pub trait RenderableCell: Render { fn cell_position_spacer( &self, is_first: bool, - window: &mut Window, + _window: &mut Window, cx: &mut Context, ) -> Option { let cell_position = self.cell_position(); @@ -327,7 +326,6 @@ pub struct MarkdownCell { editing: bool, selected: bool, cell_position: Option, - languages: Arc, _editor_subscription: gpui::Subscription, } @@ -357,9 +355,10 @@ impl MarkdownCell { let editor = cx.new(|cx| { let mut editor = Editor::new( - EditorMode::AutoHeight { - min_lines: 1, - max_lines: Some(1024), + EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sizing_behavior: 
SizingBehavior::SizeByContent, }, multi_buffer, None, @@ -378,12 +377,12 @@ impl MarkdownCell { editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); + editor.set_use_modal_editing(true); editor }); let markdown = cx.new(|cx| Markdown::new(source.clone().into(), None, None, cx)); - let cell_id = id.clone(); let editor_subscription = cx.subscribe(&editor, move |this, _editor, event, cx| match event { editor::EditorEvent::Blurred => { @@ -407,7 +406,6 @@ impl MarkdownCell { editing: start_editing, selected: false, cell_position: None, - languages, _editor_subscription: editor_subscription, } } @@ -458,8 +456,6 @@ impl MarkdownCell { .unwrap_or_default(); self.source = source.clone(); - let languages = self.languages.clone(); - self.markdown.update(cx, |markdown, cx| { markdown.reset(source.into(), cx); }); @@ -603,7 +599,7 @@ pub struct CodeCell { outputs: Vec, selected: bool, cell_position: Option, - language_task: Task<()>, + _language_task: Task<()>, execution_start_time: Option, execution_duration: Option, is_executing: bool, @@ -625,9 +621,10 @@ impl CodeCell { let editor_view = cx.new(|cx| { let mut editor = Editor::new( - EditorMode::AutoHeight { - min_lines: 1, - max_lines: Some(1024), + EditorMode::Full { + scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sizing_behavior: SizingBehavior::SizeByContent, }, multi_buffer, None, @@ -646,6 +643,7 @@ impl CodeCell { editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); + editor.set_use_modal_editing(true); editor }); @@ -665,10 +663,10 @@ impl CodeCell { outputs: Vec::new(), selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -700,9 +698,10 @@ impl CodeCell { let editor_view = cx.new(|cx| { let mut editor = Editor::new( - EditorMode::AutoHeight { - min_lines: 1, - max_lines: Some(1024), + EditorMode::Full { 
+ scale_ui_elements_with_buffer_font_size: false, + show_active_line_background: false, + sizing_behavior: SizingBehavior::SizeByContent, }, multi_buffer, None, @@ -722,6 +721,7 @@ impl CodeCell { editor.set_text(source.clone(), window, cx); editor.set_show_gutter(false, cx); editor.set_text_style_refinement(refinement); + editor.set_use_modal_editing(true); editor }); @@ -741,10 +741,10 @@ impl CodeCell { outputs, selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -872,15 +872,7 @@ impl CodeCell { cx.notify(); } - fn output_control(&self) -> Option { - if self.has_outputs() { - Some(CellControlType::ClearCell) - } else { - None - } - } - - pub fn gutter_output(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + pub fn gutter_output(&self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_selected = self.selected(); div() @@ -941,7 +933,7 @@ impl RenderableCell for CodeCell { &self.source } - fn control(&self, window: &mut Window, cx: &mut Context) -> Option { + fn control(&self, _window: &mut Window, cx: &mut Context) -> Option { let control_type = if self.has_outputs() { CellControlType::RerunCell } else { @@ -1031,8 +1023,7 @@ impl RenderableCell for CodeCell { } impl RunnableCell for CodeCell { - fn run(&mut self, window: &mut Window, cx: &mut Context) { - println!("Running code cell: {}", self.id); + fn run(&mut self, _window: &mut Window, cx: &mut Context) { cx.emit(CellEvent::Run(self.id.clone())); } @@ -1055,11 +1046,8 @@ impl Render for CodeCell { } else { None }; - let output_max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); + let output_max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); // get the language from the editor's buffer let language_name = self .editor @@ -1117,71 +1105,6 @@ 
impl Render for CodeCell { ), ), ) - // Output portion - .child( - h_flex() - .w_full() - .pr_6() - .rounded_xs() - .items_start() - .gap(DynamicSpacing::Base08.rems(cx)) - .bg(self.selected_bg_color(window, cx)) - .child(self.gutter_output(window, cx)) - .child( - div().py_1p5().w_full().child( - div() - .flex() - .size_full() - .flex_1() - .py_3() - .px_5() - .rounded_lg() - .border_1() - .child( - div() - .id((ElementId::from(self.id.to_string()), "output-scroll")) - .w_full() - .when_some(output_max_width, |div, max_w| { - div.max_w(max_w).overflow_x_scroll() - }) - .when_some(output_max_height, |div, max_h| { - div.max_h(max_h).overflow_y_scroll() - }) - .children(self.outputs.iter().map(|output| { - let content = match output { - Output::Plain { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Markdown { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Stream { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Image { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Message(message) => Some( - div().child(message.clone()).into_any_element(), - ), - Output::Table { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Json { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::ErrorOutput(error_view) => { - error_view.render(window, cx) - } - Output::ClearOutputWaitMarker => None, - }; - - div().children(content) - })), - ), - ), - ), - ) .when( self.has_outputs() || self.execution_duration.is_some() || self.is_executing, |this| { @@ -1256,41 +1179,23 @@ impl Render for CodeCell { }, ) // output at bottom - .child(div().w_full().children(self.outputs.iter().map( - |output| { - let content = match output { - Output::Plain { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Markdown { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Stream { content, .. 
} => { - Some(content.clone().into_any_element()) - } - Output::Image { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Message(message) => Some( - div() - .child(message.clone()) - .into_any_element(), - ), - Output::Table { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Json { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::ErrorOutput(error_view) => { - error_view.render(window, cx) - } - Output::ClearOutputWaitMarker => None, - }; - - div().children(content) - }, - ))), + .child( + div() + .id(( + ElementId::from(self.id.to_string()), + "output-scroll", + )) + .w_full() + .when_some(output_max_width, |div, max_width| { + div.max_w(max_width).overflow_x_scroll() + }) + .when_some(output_max_height, |div, max_height| { + div.max_h(max_height).overflow_y_scroll() + }) + .children(self.outputs.iter().map(|output| { + div().children(output.content(window, cx)) + })), + ), ), ), ) diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index af8e3e625a4170b9ad265838e977d077ce4fe86f..87f18708a1988c70d66dc4cef5355d4cbcb11dba 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -5,6 +5,7 @@ use std::{path::PathBuf, sync::Arc}; use anyhow::{Context as _, Result}; use client::proto::ViewId; use collections::HashMap; +use editor::DisplayPoint; use feature_flags::{FeatureFlagAppExt as _, NotebookFeatureFlag}; use futures::FutureExt; use futures::future::Shared; @@ -40,6 +41,7 @@ use picker::Picker; use runtimelib::{ExecuteRequest, JupyterMessage, JupyterMessageContent}; use ui::PopoverMenuHandle; use zed_actions::editor::{MoveDown, MoveUp}; +use zed_actions::notebook::{NotebookMoveDown, NotebookMoveUp}; actions!( notebook, @@ -1295,6 +1297,127 @@ impl Render for NotebookEditor { } } })) + .on_action(cx.listener(|this, _: &NotebookMoveDown, window, cx| { + let Some(cell_id) = 
this.cell_order.get(this.selected_cell_index) else { + return; + }; + let Some(cell) = this.cell_map.get(cell_id) else { + return; + }; + + let editor = match cell { + Cell::Code(cell) => cell.read(cx).editor().clone(), + Cell::Markdown(cell) => cell.read(cx).editor().clone(), + _ => return, + }; + + let is_at_last_line = editor.update(cx, |editor, cx| { + let display_snapshot = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_snapshot); + if let Some(selection) = selections.last() { + let head = selection.head(); + let cursor_row = head.row(); + let max_row = display_snapshot.max_point().row(); + + cursor_row >= max_row + } else { + false + } + }); + + if is_at_last_line { + this.select_next(&menu::SelectNext, window, cx); + if let Some(cell_id) = this.cell_order.get(this.selected_cell_index) { + if let Some(cell) = this.cell_map.get(cell_id) { + match cell { + Cell::Code(cell) => { + let editor = cell.read(cx).editor().clone(); + editor.update(cx, |editor, cx| { + editor.move_to_beginning(&Default::default(), window, cx); + }); + editor.focus_handle(cx).focus(window, cx); + } + Cell::Markdown(cell) => { + cell.update(cx, |cell, cx| { + cell.set_editing(true); + cx.notify(); + }); + let editor = cell.read(cx).editor().clone(); + editor.update(cx, |editor, cx| { + editor.move_to_beginning(&Default::default(), window, cx); + }); + editor.focus_handle(cx).focus(window, cx); + } + _ => {} + } + } + } + } else { + editor.update(cx, |editor, cx| { + editor.move_down(&Default::default(), window, cx); + }); + } + })) + .on_action(cx.listener(|this, _: &NotebookMoveUp, window, cx| { + let Some(cell_id) = this.cell_order.get(this.selected_cell_index) else { + return; + }; + let Some(cell) = this.cell_map.get(cell_id) else { + return; + }; + + let editor = match cell { + Cell::Code(cell) => cell.read(cx).editor().clone(), + Cell::Markdown(cell) => cell.read(cx).editor().clone(), + _ => return, + }; + + let is_at_first_line = 
editor.update(cx, |editor, cx| { + let display_snapshot = editor.display_snapshot(cx); + let selections = editor.selections.all_display(&display_snapshot); + if let Some(selection) = selections.first() { + let head = selection.head(); + let cursor_row = head.row(); + + cursor_row.0 == 0 + } else { + false + } + }); + + if is_at_first_line { + this.select_previous(&menu::SelectPrevious, window, cx); + if let Some(cell_id) = this.cell_order.get(this.selected_cell_index) { + if let Some(cell) = this.cell_map.get(cell_id) { + match cell { + Cell::Code(cell) => { + let editor = cell.read(cx).editor().clone(); + editor.update(cx, |editor, cx| { + editor.move_to_end(&Default::default(), window, cx); + }); + editor.focus_handle(cx).focus(window, cx); + } + Cell::Markdown(cell) => { + cell.update(cx, |cell, cx| { + cell.set_editing(true); + cx.notify(); + }); + let editor = cell.read(cx).editor().clone(); + editor.update(cx, |editor, cx| { + editor.move_to_end(&Default::default(), window, cx); + }); + editor.focus_handle(cx).focus(window, cx); + } + _ => {} + } + } + } + } else { + editor.update(cx, |editor, cx| { + editor.move_up(&Default::default(), window, cx); + }); + } + })) .on_action( cx.listener(|this, action, window, cx| this.restart_kernel(action, window, cx)), ) @@ -1391,6 +1514,9 @@ impl project::ProjectItem for NotebookItem { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + nbformat::Notebook::V3(v3_notebook) => { + nbformat::upgrade_v3_notebook(v3_notebook)? + } } }; @@ -1668,6 +1794,9 @@ impl Item for NotebookEditor { Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + Ok(nbformat::Notebook::V3(v3_notebook)) => { + nbformat::upgrade_v3_notebook(v3_notebook)? 
+ } Err(e) => { anyhow::bail!("Failed to parse notebook: {:?}", e); } diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 8be8c57cceee84435a6d99ba5c611d24c563bec3..f6d2bc4d3173ce64700b7b5ac45301df0fe0ab53 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -253,18 +253,8 @@ impl Output { ) } - pub fn render( - &self, - workspace: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> impl IntoElement + use<> { - let max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); - let content = match self { + pub fn content(&self, window: &mut Window, cx: &mut App) -> Option { + match self { Self::Plain { content, .. } => Some(content.clone().into_any_element()), Self::Markdown { content, .. } => Some(content.clone().into_any_element()), Self::Stream { content, .. } => Some(content.clone().into_any_element()), @@ -274,21 +264,36 @@ impl Output { Self::Json { content, .. } => Some(content.clone().into_any_element()), Self::ErrorOutput(error_view) => error_view.render(window, cx), Self::ClearOutputWaitMarker => None, - }; + } + } - let needs_horizontal_scroll = matches!(self, Self::Table { .. } | Self::Image { .. }); + pub fn render( + &self, + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement + use<> { + let max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); + let content = self.content(window, cx); + + let needs_horizontal_scroll = matches!(self, Self::Table { .. 
}); h_flex() .id("output-content") .w_full() - .when_some(max_width, |this, max_w| this.max_w(max_w)) - .overflow_x_scroll() + .when_else( + needs_horizontal_scroll, + |this| this.overflow_x_scroll(), + |this| this.overflow_x_hidden(), + ) .items_start() .child( div() .when(!needs_horizontal_scroll, |el| { el.flex_1().w_full().overflow_x_hidden() }) + .when_some(max_width, |el, max_width| el.max_w(max_width)) .children(content), ) .children(match self { diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 9d1ffa3d2065281cd69e67b2faf960c9aa690bcb..e5444be3d779c9541fcadd55b9255d3e25da0cba 100644 --- a/crates/repl/src/outputs/image.rs +++ b/crates/repl/src/outputs/image.rs @@ -3,10 +3,10 @@ use base64::{ Engine as _, alphabet, engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig}, }; -use gpui::{App, ClipboardItem, Image, ImageFormat, RenderImage, Window, img}; +use gpui::{App, ClipboardItem, Image, ImageFormat, Pixels, RenderImage, Window, img}; use settings::Settings as _; use std::sync::Arc; -use ui::{IntoElement, Styled, div, prelude::*}; +use ui::{IntoElement, Styled, prelude::*}; use crate::outputs::{OutputContent, plain}; use crate::repl_settings::ReplSettings; @@ -113,7 +113,7 @@ impl Render for ImageView { let settings = ReplSettings::get_global(cx); let line_height = window.line_height(); - let max_width = plain::max_width_for_columns(settings.output_max_width_columns, window, cx); + let max_width = plain::max_width_for_columns(settings.max_columns, window, cx); let max_height = if settings.output_max_height_lines > 0 { Some(line_height * settings.output_max_height_lines as f32) @@ -125,7 +125,7 @@ impl Render for ImageView { let image = self.image.clone(); - div().h(height).w(width).child(img(image)) + img(image).w(width).h(height) } } diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 0db2f811fb9ca3b82114db23826e37fe699bd3a0..71e2624f8ad7b0172a86793d5d81b38339b04f36 100644 
--- a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -22,7 +22,7 @@ use alacritty_terminal::{ term::Config, vte::ansi::Processor, }; -use gpui::{Bounds, ClipboardItem, Entity, FontStyle, TextStyle, WhiteSpace, canvas, size}; +use gpui::{Bounds, ClipboardItem, Entity, FontStyle, Pixels, TextStyle, WhiteSpace, canvas, size}; use language::Buffer; use settings::Settings as _; use terminal::terminal_settings::TerminalSettings; diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 6e061c3e2e37aa94074f17f94791ad147f56f344..56b79e20ffca74ab3f9f9c7948a7caeffc4ad4ce 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -636,12 +636,9 @@ fn language_supported(language: &Arc, cx: &mut App) -> bool { let store = ReplStore::global(cx); let store_read = store.read(cx); - // Since we're just checking for general language support, we only need to look at - // the pure Jupyter kernels - these are all the globally available ones - store_read.pure_jupyter_kernel_specifications().any(|spec| { - // Convert to lowercase for case-insensitive comparison since kernels might report "python" while our language is "Python" - spec.language().as_ref().to_lowercase() == language.name().as_ref().to_lowercase() - }) + store_read + .pure_jupyter_kernel_specifications() + .any(|spec| language.matches_kernel_language(spec.language().as_ref())) } fn get_language(editor: WeakEntity, cx: &mut App) -> Option> { diff --git a/crates/repl/src/repl_settings.rs b/crates/repl/src/repl_settings.rs index 302164a5b360157edceff1b1f2e18f6c6fd7a50b..5fd7623bb71e6446b8cacd6029108e481efc8680 100644 --- a/crates/repl/src/repl_settings.rs +++ b/crates/repl/src/repl_settings.rs @@ -27,11 +27,6 @@ pub struct ReplSettings { /// /// Default: 0 pub output_max_height_lines: usize, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. 
- /// - /// Default: 0 - pub output_max_width_columns: usize, } impl Settings for ReplSettings { @@ -44,7 +39,6 @@ impl Settings for ReplSettings { inline_output: repl.inline_output.unwrap_or(true), inline_output_max_length: repl.inline_output_max_length.unwrap_or(50), output_max_height_lines: repl.output_max_height_lines.unwrap_or(0), - output_max_width_columns: repl.output_max_width_columns.unwrap_or(0), } } } diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index 1c6ce99c2177260c1b9aaf1733326ddbda85a64f..8da94eaa7fe40e28a1d6336a648d7eae5c6767ae 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -289,7 +289,6 @@ impl ReplStore { } let language_at_cursor = language_at_cursor?; - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); // Prefer the recommended (active toolchain) kernel if it has ipykernel if let Some(active_path) = self.active_python_toolchain_path(worktree_id) { @@ -297,7 +296,7 @@ impl ReplStore { .kernel_specifications_for_worktree(worktree_id) .find(|spec| { spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) && spec.path().as_ref() == active_path.as_ref() }) .cloned(); @@ -312,7 +311,7 @@ impl ReplStore { .find(|spec| { matches!(spec, KernelSpecification::PythonEnv(_)) && spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) .cloned(); if python_env.is_some() { @@ -350,10 +349,10 @@ impl ReplStore { return Some(found_by_name); } - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); self.kernel_specifications_for_worktree(worktree_id) .find(|spec| { - spec.has_ipykernel() && spec.language().as_ref().to_lowercase() == language_name + spec.has_ipykernel() + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) 
.cloned() } diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index 2f23ed3072f4d21d1ff053cb829931ae407f6d5b..41fcd1f5d2f8ca1c78b0a2261a7c48566999e0de 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -20,13 +20,15 @@ anyhow.workspace = true bytes.workspace = true futures.workspace = true http_client.workspace = true -http_client_tls.workspace = true serde.workspace = true log.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } regex.workspace = true reqwest.workspace = true -util.workspace = true +gpui_util.workspace = true + +[target.'cfg(not(target_family = "wasm"))'.dependencies] +http_client_tls.workspace = true [dev-dependencies] gpui.workspace = true diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs index 7c8ab84bd40fa76075a8cd377e942a5c73094b22..3239a48904a6c4e71c5b94a7de1b78426da7c51d 100644 --- a/crates/reqwest_client/src/reqwest_client.rs +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -2,7 +2,7 @@ use std::error::Error; use std::sync::{LazyLock, OnceLock}; use std::{borrow::Cow, mem, pin::Pin, task::Poll, time::Duration}; -use util::defer; +use gpui_util::defer; use anyhow::anyhow; use bytes::{BufMut, Bytes, BytesMut}; diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index e5a3ed045a7e44e2208941e908718bdf7ee5b00a..594f8f5c67e2e151c1ba933b59344d8542f381e1 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -102,6 +102,11 @@ impl Chunk { self.append(Chunk::new(text).as_slice()); } + #[inline(always)] + pub fn prepend_str(&mut self, text: &str) { + self.prepend(Chunk::new(text).as_slice()); + } + #[inline(always)] pub fn append(&mut self, slice: ChunkSlice) { if slice.is_empty() { @@ -116,6 +121,28 @@ impl Chunk { self.text.push_str(slice.text); } + #[inline(always)] + pub fn prepend(&mut self, slice: ChunkSlice) { + if slice.is_empty() { + return; + } + if 
self.text.is_empty() { + *self = Chunk::new(slice.text); + return; + } + + let shift = slice.text.len(); + self.chars = slice.chars | (self.chars << shift); + self.chars_utf16 = slice.chars_utf16 | (self.chars_utf16 << shift); + self.newlines = slice.newlines | (self.newlines << shift); + self.tabs = slice.tabs | (self.tabs << shift); + + let mut new_text = ArrayString::::new(); + new_text.push_str(slice.text); + new_text.push_str(&self.text); + self.text = new_text; + } + #[inline(always)] pub fn as_slice(&self) -> ChunkSlice<'_> { ChunkSlice { @@ -890,6 +917,24 @@ mod tests { verify_chunk(chunk1.as_slice(), &(str1 + &str2[start_offset..end_offset])); } + #[gpui::test(iterations = 1000)] + fn test_prepend_random_strings(mut rng: StdRng) { + let len1 = rng.random_range(0..=MAX_BASE); + let len2 = rng.random_range(0..=MAX_BASE).saturating_sub(len1); + let str1 = random_string_with_utf8_len(&mut rng, len1); + let str2 = random_string_with_utf8_len(&mut rng, len2); + let mut chunk1 = Chunk::new(&str1); + let chunk2 = Chunk::new(&str2); + let char_offsets = char_offsets_with_end(&str2); + let start_index = rng.random_range(0..char_offsets.len()); + let start_offset = char_offsets[start_index]; + let end_offset = char_offsets[rng.random_range(start_index..char_offsets.len())]; + let slice = chunk2.slice(start_offset..end_offset); + let prefix_text = &str2[start_offset..end_offset]; + chunk1.prepend(slice); + verify_chunk(chunk1.as_slice(), &(prefix_text.to_owned() + &str1)); + } + /// Return the byte offsets for each character in a string. /// /// These are valid offsets to split the string. 
diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 7ab273be7bfa3fa84a608c69174cfcc6a038eac5..5b599bad51c2f571cca11625be0b290e7e748504 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -167,6 +167,11 @@ impl Rope { (), ); + if text.is_empty() { + self.check_invariants(); + return; + } + #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 16; #[cfg(not(all(test, not(rust_analyzer))))] @@ -269,6 +274,23 @@ impl Rope { } pub fn push_front(&mut self, text: &str) { + if text.is_empty() { + return; + } + if self.is_empty() { + self.push(text); + return; + } + if self + .chunks + .first() + .is_some_and(|c| c.text.len() + text.len() <= chunk::MAX_BASE) + { + self.chunks + .update_first(|first_chunk| first_chunk.prepend_str(text), ()); + self.check_invariants(); + return; + } let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); } @@ -548,6 +570,48 @@ impl Rope { } } + pub fn starts_with(&self, pattern: &str) -> bool { + if pattern.len() > self.len() { + return false; + } + let mut remaining = pattern; + for chunk in self.chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(..remaining.len().min(chunk.len())) else { + return false; + }; + if remaining.starts_with(chunk) { + remaining = &remaining[chunk.len()..]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + + pub fn ends_with(&self, pattern: &str) -> bool { + if pattern.len() > self.len() { + return false; + } + let mut remaining = pattern; + for chunk in self.reversed_chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(chunk.len() - remaining.len().min(chunk.len())..) 
else { + return false; + }; + if remaining.ends_with(chunk) { + remaining = &remaining[..remaining.len() - chunk.len()]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + pub fn line_len(&self, row: u32) -> u32 { self.clip_point(Point::new(row, u32::MAX), Bias::Left) .column @@ -2168,6 +2232,74 @@ mod tests { assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } + #[test] + fn test_starts_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.starts_with("")); + assert!(rope.starts_with("H")); + assert!(rope.starts_with("Hello")); + assert!(rope.starts_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.starts_with("ello")); + assert!(!rope.starts_with("Hello, world! 🌍🌎🌏!")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.starts_with("")); + assert!(!empty_rope.starts_with("a")); + } + + #[test] + fn test_ends_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.ends_with("")); + assert!(rope.ends_with("🌏")); + assert!(rope.ends_with("🌍🌎🌏")); + assert!(rope.ends_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.ends_with("🌎")); + assert!(!rope.ends_with("!Hello, world! 
🌍🌎🌏")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.ends_with("")); + assert!(!empty_rope.ends_with("a")); + } + + #[test] + fn test_starts_with_ends_with_random() { + let mut rng = StdRng::seed_from_u64(0); + for _ in 0..100 { + let len = rng.random_range(0..100); + let text: String = RandomCharIter::new(&mut rng).take(len).collect(); + let rope = Rope::from(text.as_str()); + + for _ in 0..10 { + let start = rng.random_range(0..=text.len()); + let start = text.ceil_char_boundary(start); + let end = rng.random_range(start..=text.len()); + let end = text.ceil_char_boundary(end); + let prefix = &text[..end]; + let suffix = &text[start..]; + + assert_eq!( + rope.starts_with(prefix), + text.starts_with(prefix), + "starts_with mismatch for {:?} in {:?}", + prefix, + text + ); + assert_eq!( + rope.ends_with(suffix), + text.ends_with(suffix), + "ends_with mismatch for {:?} in {:?}", + suffix, + text + ); + } + } + } + #[test] fn test_is_char_boundary() { let fixture = "地"; @@ -2229,6 +2361,119 @@ mod tests { } } + #[test] + fn test_push_front_empty_text_on_empty_rope() { + let mut rope = Rope::new(); + rope.push_front(""); + assert_eq!(rope.text(), ""); + assert_eq!(rope.len(), 0); + } + + #[test] + fn test_push_front_empty_text_on_nonempty_rope() { + let mut rope = Rope::from("hello"); + rope.push_front(""); + assert_eq!(rope.text(), "hello"); + } + + #[test] + fn test_push_front_on_empty_rope() { + let mut rope = Rope::new(); + rope.push_front("hello"); + assert_eq!(rope.text(), "hello"); + assert_eq!(rope.len(), 5); + assert_eq!(rope.max_point(), Point::new(0, 5)); + } + + #[test] + fn test_push_front_single_space() { + let mut rope = Rope::from("hint"); + rope.push_front(" "); + assert_eq!(rope.text(), " hint"); + assert_eq!(rope.len(), 5); + } + + #[gpui::test(iterations = 50)] + fn test_push_front_random(mut rng: StdRng) { + let initial_len = rng.random_range(0..=64); + let initial_text: String = RandomCharIter::new(&mut 
rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let mut expected = initial_text; + + for _ in 0..rng.random_range(1..=10) { + let prefix_len = rng.random_range(0..=32); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + expected.insert_str(0, &prefix); + + assert_eq!( + rope.text(), + expected, + "text mismatch after push_front({:?})", + prefix + ); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!( + actual_summary.len, expected_summary.len, + "len mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.lines, expected_summary.lines, + "lines mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.chars, expected_summary.chars, + "chars mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.longest_row, expected_summary.longest_row, + "longest_row mismatch for {:?}", + expected + ); + + // Verify offset-to-point and point-to-offset round-trip at boundaries. 
+ for (ix, _) in expected.char_indices().chain(Some((expected.len(), '\0'))) { + assert_eq!( + rope.point_to_offset(rope.offset_to_point(ix)), + ix, + "offset round-trip failed at {} for {:?}", + ix, + expected + ); + } + } + } + + #[gpui::test(iterations = 50)] + fn test_push_front_large_prefix(mut rng: StdRng) { + let initial_len = rng.random_range(0..=32); + let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let prefix_len = rng.random_range(64..=256); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + let expected = format!("{}{}", prefix, initial_text); + + assert_eq!(rope.text(), expected); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!(actual_summary.len, expected_summary.len); + assert_eq!(actual_summary.lines, expected_summary.lines); + assert_eq!(actual_summary.chars, expected_summary.chars); + } + fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index b3aa0301f204e97e6b1acda2a5cff4479b51c590..73bf5fdd8fcaaf1437013d300102a9e593823c7b 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -222,7 +222,7 @@ impl PickerDelegate for RulePickerDelegate { cx.notify(); } - fn can_select(&mut self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { + fn can_select(&self, ix: usize, _: &mut Window, _: &mut Context>) -> bool { match self.filtered_entries.get(ix) { Some(RulePickerEntry::Rule(_)) => true, Some(RulePickerEntry::Header(_)) | Some(RulePickerEntry::Separator) | None => false, @@ -1106,6 +1106,7 @@ impl RulesLibrary { temperature: None, thinking_allowed: true, 
thinking_effort: None, + speed: None, }, cx, ) diff --git a/crates/scheduler/Cargo.toml b/crates/scheduler/Cargo.toml index c1c791c06736297a284dcc16396f5e5d040c7bad..bc97b17b281275ffaad2f8626d000e87fe8ec42e 100644 --- a/crates/scheduler/Cargo.toml +++ b/crates/scheduler/Cargo.toml @@ -23,3 +23,4 @@ flume = "0.11" futures.workspace = true parking_lot.workspace = true rand.workspace = true +web-time.workspace = true diff --git a/crates/scheduler/src/clock.rs b/crates/scheduler/src/clock.rs index 017643c4eb7ffe46db48b5efb43d006bf155a03c..8c989165b679746c68e6c0295e8706ab77373d29 100644 --- a/crates/scheduler/src/clock.rs +++ b/crates/scheduler/src/clock.rs @@ -1,6 +1,8 @@ use chrono::{DateTime, Utc}; use parking_lot::Mutex; -use std::time::{Duration, Instant}; +use std::time::Duration; + +pub use web_time::Instant; pub trait Clock { fn utc_now(&self) -> DateTime; diff --git a/crates/scheduler/src/executor.rs b/crates/scheduler/src/executor.rs index 34e543645aba5a9a347e7d337fe0e65a23957c8c..76df2e69f66398e3709e1db58a847b1cd0079fc4 100644 --- a/crates/scheduler/src/executor.rs +++ b/crates/scheduler/src/executor.rs @@ -1,4 +1,4 @@ -use crate::{Priority, RunnableMeta, Scheduler, SessionId, Timer}; +use crate::{Instant, Priority, RunnableMeta, Scheduler, SessionId, Timer}; use std::{ future::Future, marker::PhantomData, @@ -12,7 +12,7 @@ use std::{ }, task::{Context, Poll}, thread::{self, ThreadId}, - time::{Duration, Instant}, + time::Duration, }; #[derive(Clone)] @@ -372,8 +372,9 @@ where impl Drop for Checked { fn drop(&mut self) { - assert!( - self.id == thread_id(), + assert_eq!( + self.id, + thread_id(), "local task dropped by a thread that didn't spawn it. 
Task spawned at {}", self.location ); diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs index 7cce194f5f0427706fe531ae82c883de453c83cf..e4c330dcd162ad6512da05c9e66449fd7da36083 100644 --- a/crates/scheduler/src/test_scheduler.rs +++ b/crates/scheduler/src/test_scheduler.rs @@ -1,6 +1,6 @@ use crate::{ - BackgroundExecutor, Clock, ForegroundExecutor, Priority, RunnableMeta, Scheduler, SessionId, - TestClock, Timer, + BackgroundExecutor, Clock, ForegroundExecutor, Instant, Priority, RunnableMeta, Scheduler, + SessionId, TestClock, Timer, }; use async_task::Runnable; use backtrace::{Backtrace, BacktraceFrame}; @@ -26,7 +26,7 @@ use std::{ }, task::{Context, Poll, RawWaker, RawWakerVTable, Waker}, thread::{self, Thread}, - time::{Duration, Instant}, + time::Duration, }; const PENDING_TRACES_VAR_NAME: &str = "PENDING_TRACES"; @@ -57,7 +57,7 @@ impl TestScheduler { .map(|seed| seed.parse().unwrap()) .unwrap_or(0); - (seed..num_iterations as u64) + (seed..seed + num_iterations as u64) .map(|seed| { let mut unwind_safe_f = AssertUnwindSafe(&mut f); eprintln!("Running seed: {seed}"); @@ -335,6 +335,28 @@ impl TestScheduler { false } + /// Drops all runnable tasks from the scheduler. + /// + /// This is used by the leak detector to ensure that all tasks have been dropped as tasks may keep entities alive otherwise. + /// Why do we even have tasks left when tests finish you may ask. The reason for that is simple, the scheduler itself is the executor and it retains the scheduled runnables. + /// A lot of tasks, including every foreground task contain an executor handle that keeps the test scheduler alive, causing a reference cycle, thus the need for this function right now. 
+ pub fn drain_tasks(&self) { + // dropping runnables may reschedule tasks + // due to drop impls with executors in them + // so drop until we reach a fixpoint + loop { + let mut state = self.state.lock(); + if state.runnables.is_empty() && state.timers.is_empty() { + break; + } + let runnables = std::mem::take(&mut state.runnables); + let timers = std::mem::take(&mut state.timers); + drop(state); + drop(timers); + drop(runnables); + } + } + pub fn advance_clock_to_next_timer(&self) -> bool { if let Some(timer) = self.state.lock().timers.first() { self.clock.advance(timer.expiration - self.clock.now()); diff --git a/crates/scheduler/src/tests.rs b/crates/scheduler/src/tests.rs index dc24fed68d7cb1c83953f4de38bb4392d3b61029..03fe8075f91fff2d72b9bb1c0d4d389a69d9c3bf 100644 --- a/crates/scheduler/src/tests.rs +++ b/crates/scheduler/src/tests.rs @@ -290,6 +290,31 @@ fn test_helper_methods() { assert_eq!(results, vec![10, 10, 10]); } +#[test] +fn test_many_with_arbitrary_seed() { + for seed in [0u64, 1, 5, 42] { + let mut seeds_seen = Vec::new(); + let iterations = 3usize; + + for current_seed in seed..seed + iterations as u64 { + let scheduler = Arc::new(TestScheduler::new(TestSchedulerConfig::with_seed( + current_seed, + ))); + let captured_seed = current_seed; + scheduler + .foreground() + .block_on(async { seeds_seen.push(captured_seed) }); + scheduler.run(); + } + + assert_eq!( + seeds_seen, + (seed..seed + iterations as u64).collect::>(), + "Expected {iterations} iterations starting at seed {seed}" + ); + } +} + #[test] fn test_block_with_timeout() { // Test case: future completes within timeout diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 42b2344316e8d9abe19380b8a4aaaf3538fae9d1..35cd25dc389d522fc2a3d0ed88b8e06a9e181e67 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -18,7 +18,7 @@ use editor::{ }; use futures::channel::oneshot; use gpui::{ - Action, App, ClickEvent, 
Context, Entity, EventEmitter, Focusable, InteractiveElement as _, + App, ClickEvent, Context, Entity, EventEmitter, Focusable, InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, Styled, Subscription, Task, WeakEntity, Window, div, }; @@ -117,18 +117,17 @@ impl Render for BufferSearchBar { .toggle_state(!is_split) .tooltip(Tooltip::element(move |_, cx| { v_flex() - .gap_1() - .child(Label::new("Unified")) + .child("Unified") .child( h_flex() .gap_0p5() - .text_sm() + .text_ui_sm(cx) .text_color(Color::Muted.color(cx)) .children(render_modifiers( &gpui::Modifiers::secondary_key(), PlatformStyle::platform(), None, - Some(TextSize::Default.rems(cx).into()), + Some(TextSize::Small.rems(cx).into()), false, )) .child("click to set as default"), @@ -168,18 +167,17 @@ impl Render for BufferSearchBar { .toggle_state(is_split) .tooltip(Tooltip::element(move |_, cx| { v_flex() - .gap_1() - .child(Label::new("Split")) + .child("Split") .child( h_flex() .gap_0p5() - .text_sm() + .text_ui_sm(cx) .text_color(Color::Muted.color(cx)) .children(render_modifiers( &gpui::Modifiers::secondary_key(), PlatformStyle::platform(), None, - Some(TextSize::Default.rems(cx).into()), + Some(TextSize::Small.rems(cx).into()), false, )) .child("click to set as default"), @@ -245,9 +243,9 @@ impl Render for BufferSearchBar { cx, ) }) - .on_click(|_event, window, cx| { - window.dispatch_action(ToggleFoldAll.boxed_clone(), cx) - }) + .on_click(cx.listener(|this, _: &ClickEvent, window, cx| { + this.toggle_fold_all(&ToggleFoldAll, window, cx); + })) }; if self.dismissed { @@ -1876,7 +1874,7 @@ mod tests { use super::*; use editor::{ - DisplayPoint, Editor, ExcerptRange, MultiBuffer, SearchSettings, SelectionEffects, + DisplayPoint, Editor, MultiBuffer, PathKey, SearchSettings, SelectionEffects, display_map::DisplayRow, test::editor_test_context::EditorTestContext, }; use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext}; @@ -1934,14 +1932,18 @@ mod 
tests { let mut buffer = MultiBuffer::new(language::Capability::ReadWrite); //[ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))] - buffer.push_excerpts( + buffer.set_excerpts_for_path( + PathKey::sorted(0), buffer1, - [ExcerptRange::new(Point::new(0, 0)..Point::new(3, 0))], + [Point::new(0, 0)..Point::new(3, 0)], + 0, cx, ); - buffer.push_excerpts( + buffer.set_excerpts_for_path( + PathKey::sorted(1), buffer2, - [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))], + [Point::new(0, 0)..Point::new(1, 0)], + 0, cx, ); diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 9049c95eb9529b9a490687e1130af273b7496970..62b577c44520a6922798076cf085defea46d8688 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -44,9 +44,9 @@ pub use keymap_file::{ pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ - InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, LocalSettingsPath, - MigrationStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey, - SettingsLocation, SettingsParseResult, SettingsStore, + DefaultSemanticTokenRules, InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, + LocalSettingsKind, LocalSettingsPath, MigrationStatus, Settings, SettingsFile, + SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore, }; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 411f57375a2303e5e2c30e182365f526989891a4..26425faf113a9dc0f52ad04809dc71c2f89eeb69 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -241,6 +241,11 @@ impl LocalSettingsPath { impl Global for SettingsStore {} +#[derive(Default)] +pub struct DefaultSemanticTokenRules(pub SemanticTokenRules); + +impl gpui::Global for DefaultSemanticTokenRules {} + #[doc(hidden)] #[derive(Debug)] pub struct 
SettingValue { @@ -275,29 +280,22 @@ pub struct SettingsJsonSchemaParams<'a> { impl SettingsStore { pub fn new(cx: &mut App, default_settings: &str) -> Self { - Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules()) + Self::new_with_semantic_tokens(cx, default_settings) } - pub fn new_with_semantic_tokens( - cx: &mut App, - default_settings: &str, - default_semantic_tokens: &str, - ) -> Self { + pub fn new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self { let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded(); - let mut default_settings: SettingsContent = + let default_settings: SettingsContent = SettingsContent::parse_json_with_comments(default_settings).unwrap(); - if let Ok(semantic_token_rules) = - crate::parse_json_with_comments::(default_semantic_tokens) - { - let global_lsp = default_settings - .global_lsp_settings - .get_or_insert_with(Default::default); - let existing_rules = global_lsp - .semantic_token_rules - .get_or_insert_with(Default::default); - existing_rules.rules.extend(semantic_token_rules.rules); + if !cx.has_global::() { + cx.set_global::( + crate::parse_json_with_comments::( + &crate::default_semantic_token_rules(), + ) + .map(DefaultSemanticTokenRules) + .unwrap_or_default(), + ); } - let default_settings: Rc = default_settings.into(); let mut this = Self { setting_values: Default::default(), @@ -868,18 +866,30 @@ impl SettingsStore { /// Sets language-specific semantic token rules. /// /// These rules are registered by language modules (e.g. the Rust language module) - /// and are stored separately from the global rules. They are only applied to - /// buffers of the matching language by the `SemanticTokenStylizer`. + /// or by third-party extensions (via `semantic_token_rules.json` in their language + /// directories). They are stored separately from the global rules and are only + /// applied to buffers of the matching language by the `SemanticTokenStylizer`. 
/// - /// These should be registered before any `SemanticTokenStylizer` instances are - /// created (typically during `languages::init`), as existing cached stylizers - /// are not automatically invalidated. + /// This triggers a settings recomputation so that observers (e.g. `LspStore`) + /// are notified and can invalidate cached stylizers. pub fn set_language_semantic_token_rules( &mut self, language: SharedString, rules: SemanticTokenRules, + cx: &mut App, ) { self.language_semantic_token_rules.insert(language, rules); + self.recompute_values(None, cx); + } + + /// Removes language-specific semantic token rules for the given language. + /// + /// This should be called when an extension that registered rules for a language + /// is unloaded. Triggers a settings recomputation so that observers (e.g. + /// `LspStore`) are notified and can invalidate cached stylizers. + pub fn remove_language_semantic_token_rules(&mut self, language: &str, cx: &mut App) { + self.language_semantic_token_rules.remove(language); + self.recompute_values(None, cx); } /// Returns the language-specific semantic token rules for the given language, @@ -1696,7 +1706,7 @@ mod tests { r#"{ "languages": { "JSON": { - "auto_indent": true + "auto_indent": "syntax_aware" } } }"# @@ -1706,12 +1716,12 @@ mod tests { .languages_mut() .get_mut("JSON") .unwrap() - .auto_indent = Some(false); + .auto_indent = Some(crate::AutoIndentMode::None); settings.languages_mut().insert( "Rust".into(), LanguageSettingsContent { - auto_indent: Some(true), + auto_indent: Some(crate::AutoIndentMode::SyntaxAware), ..Default::default() }, ); @@ -1719,10 +1729,10 @@ mod tests { r#"{ "languages": { "Rust": { - "auto_indent": true + "auto_indent": "syntax_aware" }, "JSON": { - "auto_indent": false + "auto_indent": "none" } } }"# diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index d0643be3bbee82be02c9c461a5f18ba62893a3cd..8a5a497d265c02787d6944915c0dba56e2381a79 100644 --- 
a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -801,6 +801,7 @@ impl VsCodeSettings { starts_open: None, sticky_scroll: None, auto_open: None, + diagnostic_badges: None, }; if let (Some(false), Some(false)) = ( diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index b03a6efbe5771b5ca47e3711492eab5106ba11f8..87e117b8b0bbdd9a789bae18c3f9dce98a6f1bc0 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -290,6 +290,7 @@ impl JsonSchema for LanguageModelProviderSetting { "openai", "openrouter", "vercel", + "vercel_ai_gateway", "x_ai", "zed.dev" ] @@ -316,73 +317,21 @@ impl From<&str> for LanguageModelProviderSetting { #[with_fallible_options] #[derive(Default, PartialEq, Deserialize, Serialize, Clone, JsonSchema, MergeFrom, Debug)] -pub struct AllAgentServersSettings { - pub gemini: Option, - pub claude: Option, - pub codex: Option, - - /// Custom agent servers configured by the user - #[serde(flatten)] - pub custom: HashMap, +#[serde(transparent)] +pub struct AllAgentServersSettings(pub HashMap); + +impl std::ops::Deref for AllAgentServersSettings { + type Target = HashMap; + + fn deref(&self) -> &Self::Target { + &self.0 + } } -#[with_fallible_options] -#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, MergeFrom, Debug, PartialEq)] -pub struct BuiltinAgentServerSettings { - /// Absolute path to a binary to be used when launching this agent. - /// - /// This can be used to run a specific binary without automatic downloads or searching `$PATH`. - #[serde(rename = "command")] - pub path: Option, - /// If a binary is specified in `command`, it will be passed these arguments. - pub args: Option>, - /// If a binary is specified in `command`, it will be passed these environment variables. - pub env: Option>, - /// Whether to skip searching `$PATH` for an agent server binary when - /// launching this agent. 
- /// - /// This has no effect if a `command` is specified. Otherwise, when this is - /// `false`, Zed will search `$PATH` for an agent server binary and, if one - /// is found, use it for threads with this agent. If no agent binary is - /// found on `$PATH`, Zed will automatically install and use its own binary. - /// When this is `true`, Zed will not search `$PATH`, and will always use - /// its own binary. - /// - /// Default: true - pub ignore_system_version: Option, - /// The default mode to use for this agent. - /// - /// Note: Not only all agents support modes. - /// - /// Default: None - pub default_mode: Option, - /// The default model to use for this agent. - /// - /// This should be the model ID as reported by the agent. - /// - /// Default: None - pub default_model: Option, - /// The favorite models for this agent. - /// - /// These are the model IDs as reported by the agent. - /// - /// Default: [] - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub favorite_models: Vec, - /// Default values for session config options. - /// - /// This is a map from config option ID to value ID. - /// - /// Default: {} - #[serde(default, skip_serializing_if = "HashMap::is_empty")] - pub default_config_options: HashMap, - /// Favorited values for session config options. - /// - /// This is a map from config option ID to a list of favorited value IDs. 
- /// - /// Default: {} - #[serde(default, skip_serializing_if = "HashMap::is_empty")] - pub favorite_config_option_values: HashMap>, +impl std::ops::DerefMut for AllAgentServersSettings { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } } #[with_fallible_options] diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index ab526c405a4b34962c298d68365cb828975628b1..fba636ee28be121a15da4b3d50046c53c0bdd5b3 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -81,7 +81,6 @@ pub enum EditPredictionProvider { None, #[default] Copilot, - Supermaven, Zed, Codestral, Ollama, @@ -91,7 +90,7 @@ pub enum EditPredictionProvider { Experimental(&'static str), } -pub const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; +const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; impl<'de> Deserialize<'de> for EditPredictionProvider { fn deserialize(deserializer: D) -> Result @@ -103,7 +102,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { pub enum Content { None, Copilot, - Supermaven, Zed, Codestral, Ollama, @@ -116,7 +114,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { Ok(match Content::deserialize(deserializer)? 
{ Content::None => EditPredictionProvider::None, Content::Copilot => EditPredictionProvider::Copilot, - Content::Supermaven => EditPredictionProvider::Supermaven, Content::Zed => EditPredictionProvider::Zed, Content::Codestral => EditPredictionProvider::Codestral, Content::Ollama => EditPredictionProvider::Ollama, @@ -126,9 +123,7 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { Content::Experimental(name) if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME => { - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) + EditPredictionProvider::Zed } Content::Experimental(name) => { return Err(D::Error::custom(format!( @@ -146,7 +141,6 @@ impl EditPredictionProvider { EditPredictionProvider::Zed => true, EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral | EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi @@ -160,14 +154,10 @@ impl EditPredictionProvider { match self { EditPredictionProvider::Zed => Some("Zed AI"), EditPredictionProvider::Copilot => Some("GitHub Copilot"), - EditPredictionProvider::Supermaven => Some("Supermaven"), EditPredictionProvider::Codestral => Some("Codestral"), EditPredictionProvider::Sweep => Some("Sweep"), EditPredictionProvider::Mercury => Some("Mercury"), - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) => Some("Zeta2"), - EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => None, + EditPredictionProvider::Experimental(_) | EditPredictionProvider::None => None, EditPredictionProvider::Ollama => Some("Ollama"), EditPredictionProvider::OpenAiCompatibleApi => Some("OpenAI-Compatible API"), } @@ -245,6 +235,7 @@ pub enum EditPredictionPromptFormat { #[default] Infer, Zeta, + Zeta2, CodeLlama, StarCoder, DeepseekCoder, @@ -378,6 +369,32 @@ pub enum EditPredictionsMode { Eager, } +/// Controls the 
soft-wrapping behavior in the editor. +#[derive( + Copy, + Clone, + Debug, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + MergeFrom, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum AutoIndentMode { + /// Adjusts indentation based on syntax context when typing. + /// Uses tree-sitter to analyze code structure and indent accordingly. + SyntaxAware, + /// Preserve the indentation of the current line when creating new lines, + /// but don't adjust based on syntax context. + PreserveIndent, + /// No automatic indentation. New lines start at column 0. + None, +} + /// Controls the soft-wrapping behavior in the editor. #[derive( Copy, @@ -580,10 +597,14 @@ pub struct LanguageSettingsContent { /// /// Default: true pub linked_edits: Option, - /// Whether indentation should be adjusted based on the context whilst typing. + /// Controls automatic indentation behavior when typing. /// - /// Default: true - pub auto_indent: Option, + /// - "syntax_aware": Adjusts indentation based on syntax context (default) + /// - "preserve_indent": Preserves current line's indentation on new lines + /// - "none": No automatic indentation + /// + /// Default: syntax_aware + pub auto_indent: Option, /// Whether indentation of pasted content should be adjusted based on the context. 
/// /// Default: true diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 4d5e89f9ab7d1e647e82d22767ec2a9b91b80d6d..6af419119d819931f3ad826ff416f1b47c89824f 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -20,6 +20,7 @@ pub struct AllLanguageModelSettingsContent { pub openai: Option, pub openai_compatible: Option, OpenAiCompatibleSettingsContent>>, pub vercel: Option, + pub vercel_ai_gateway: Option, pub x_ai: Option, #[serde(rename = "zed.dev")] pub zed_dot_dev: Option, @@ -301,6 +302,25 @@ pub struct VercelAvailableModel { pub max_completion_tokens: Option, } +#[with_fallible_options] +#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] +pub struct VercelAiGatewaySettingsContent { + pub api_url: Option, + pub available_models: Option>, +} + +#[with_fallible_options] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] +pub struct VercelAiGatewayAvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: u64, + pub max_output_tokens: Option, + pub max_completion_tokens: Option, + #[serde(default)] + pub capabilities: OpenAiCompatibleModelCapabilities, +} + #[with_fallible_options] #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] pub struct GoogleSettingsContent { diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 788917b5ebb0fc0f4ba29e29fc95b0da148c6f0f..5a4e87c384d802f3de4c96c07f65cf163c3a6d1a 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -619,6 +619,11 @@ pub struct GitPanelSettingsContent { /// /// Default: false pub tree_view: Option, + + /// Whether to show the addition/deletion change count next to each file in the Git panel. 
+ /// + /// Default: false + pub diff_stats: Option, } #[derive( @@ -711,10 +716,6 @@ pub struct FileFinderSettingsContent { /// /// Default: true pub skip_focus_for_active_in_search: Option, - /// Determines whether to show the git status in the file finder - /// - /// Default: true - pub git_status: Option, /// Whether to use gitignored files when searching. /// Only the file Zed had indexed will be used, not necessary all the gitignored files. /// @@ -1147,11 +1148,6 @@ pub struct ReplSettingsContent { /// /// Default: 0 pub output_max_height_lines: Option, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. - /// - /// Default: 0 - pub output_max_width_columns: Option, } /// Settings for configuring the which-key popup behaviour. diff --git a/crates/settings_content/src/theme.rs b/crates/settings_content/src/theme.rs index c63536b0749afd055ab4883fc3169b2d1151e3f2..623e463cfb6c7da13f30b7df04b20177f598c893 100644 --- a/crates/settings_content/src/theme.rs +++ b/crates/settings_content/src/theme.rs @@ -1033,6 +1033,9 @@ pub struct ThemeColorsContent { /// Background color for Vim Visual Block mode indicator. #[serde(rename = "vim.visual_block.background")] pub vim_visual_block_background: Option, + /// Background color for Vim yank highlight. + #[serde(rename = "vim.yank.background")] + pub vim_yank_background: Option, /// Background color for Vim Helix Normal mode indicator. 
#[serde(rename = "vim.helix_normal.background")] pub vim_helix_normal_background: Option, diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index 3778ccc0373f4b937a08e3a435de40ad6a6d2cff..7262a83b384665b0bcd868bf14dbfaa2928a35c1 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -739,6 +739,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: directories_first pub sort_mode: Option, + /// Whether to show error and warning count badges next to file names in the project panel. + /// + /// Default: true + pub diagnostic_badges: Option, } #[derive( diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 738eff917bc57a7a2543f9c31494af02883299d1..dbac4d7ba350fcff07016a2ccfa483f3d84472c7 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -3154,7 +3154,7 @@ fn search_and_files_page() -> SettingsPage { ] } - fn file_finder_section() -> [SettingsPageItem; 6] { + fn file_finder_section() -> [SettingsPageItem; 5] { [ SettingsPageItem::SectionHeader("File Finder"), // todo: null by default @@ -3242,24 +3242,6 @@ fn search_and_files_page() -> SettingsPage { metadata: None, files: USER, }), - SettingsPageItem::SettingItem(SettingItem { - title: "Git Status", - description: "Show the Git status in the file finder.", - field: Box::new(SettingField { - json_path: Some("file_finder.git_status"), - pick: |settings_content| { - settings_content.file_finder.as_ref()?.git_status.as_ref() - }, - write: |settings_content, value| { - settings_content - .file_finder - .get_or_insert_default() - .git_status = value; - }, - }), - metadata: None, - files: USER, - }), ] } @@ -4256,7 +4238,7 @@ fn window_and_layout_page() -> SettingsPage { } fn panels_page() -> SettingsPage { - fn project_panel_section() -> [SettingsPageItem; 21] { + fn project_panel_section() -> [SettingsPageItem; 22] { [ 
SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -4556,6 +4538,28 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Diagnostic Badges", + description: "Show error and warning count badges next to file names in the project panel.", + field: Box::new(SettingField { + json_path: Some("project_panel.diagnostic_badges"), + pick: |settings_content| { + settings_content + .project_panel + .as_ref()? + .diagnostic_badges + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .diagnostic_badges = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Sticky Scroll", description: "Whether to stick parent directories at top of the project panel.", @@ -5017,7 +5021,7 @@ fn panels_page() -> SettingsPage { ] } - fn git_panel_section() -> [SettingsPageItem; 10] { + fn git_panel_section() -> [SettingsPageItem; 11] { [ SettingsPageItem::SectionHeader("Git Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5159,6 +5163,24 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Diff Stats", + description: "Whether to show the addition/deletion change count next to each file in the Git panel.", + field: Box::new(SettingField { + json_path: Some("git_panel.diff_stats"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.diff_stats.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .diff_stats = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Scroll Bar", description: "How and when the scrollbar should be displayed.", @@ -7383,7 +7405,7 @@ fn language_settings_data() -> Box<[SettingsPageItem]> { }), 
SettingsPageItem::SettingItem(SettingItem { title: "Auto Indent", - description: "Whether indentation should be adjusted based on the context whilst typing.", + description: "Controls automatic indentation behavior when typing.", field: Box::new(SettingField { json_path: Some("languages.$(language).auto_indent"), pick: |settings_content| { diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index 338fe4de14f1f7e9060fafe865253f09f0bdc481..32c4bee84bd1f72263ed28bcd44d7e6349c4b24c 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -2,6 +2,7 @@ use codestral::{CODESTRAL_API_URL, codestral_api_key_state, codestral_api_url}; use edit_prediction::{ ApiKeyState, mercury::{MERCURY_CREDENTIALS_URL, mercury_api_token}, + open_ai_compatible::{open_ai_compatible_api_token, open_ai_compatible_api_url}, sweep_ai::{SWEEP_CREDENTIALS_URL, sweep_api_token}, }; use edit_prediction_ui::{get_available_providers, set_completion_provider}; @@ -33,7 +34,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::Inception, "Mercury", - "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + ApiKeyDocs::Link { + dashboard_url: "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + }, mercury_api_token(cx), |_cx| MERCURY_CREDENTIALS_URL, None, @@ -46,7 +49,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::SweepAi, "Sweep", - "https://app.sweep.dev/".into(), + ApiKeyDocs::Link { + dashboard_url: "https://app.sweep.dev/".into(), + }, sweep_api_token(cx), |_cx| SWEEP_CREDENTIALS_URL, Some( @@ -68,7 +73,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::AiMistral, "Codestral", - "https://console.mistral.ai/codestral".into(), + ApiKeyDocs::Link { + dashboard_url: 
"https://console.mistral.ai/codestral".into(), + }, codestral_api_key_state(cx), |cx| codestral_api_url(cx), Some( @@ -87,7 +94,31 @@ pub(crate) fn render_edit_prediction_setup_page( .into_any_element(), ), Some(render_ollama_provider(settings_window, window, cx).into_any_element()), - Some(render_open_ai_compatible_provider(settings_window, window, cx).into_any_element()), + Some( + render_api_key_provider( + IconName::AiOpenAiCompat, + "OpenAI Compatible API", + ApiKeyDocs::Custom { + message: "Set an API key here. It will be sent as Authorization: Bearer {key}." + .into(), + }, + open_ai_compatible_api_token(cx), + |cx| open_ai_compatible_api_url(cx), + Some( + settings_window + .render_sub_page_items_section( + open_ai_compatible_settings().iter().enumerate(), + true, + window, + cx, + ) + .into_any_element(), + ), + window, + cx, + ) + .into_any_element(), + ), ]; div() @@ -162,10 +193,15 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement { .into_any_element() } +enum ApiKeyDocs { + Link { dashboard_url: SharedString }, + Custom { message: SharedString }, +} + fn render_api_key_provider( icon: IconName, title: &'static str, - link: SharedString, + docs: ApiKeyDocs, api_key_state: Entity, current_url: fn(&mut App) -> SharedString, additional_fields: Option, @@ -209,25 +245,32 @@ fn render_api_key_provider( .icon(icon) .no_padding(true); let button_link_label = format!("{} dashboard", title); - let description = h_flex() - .min_w_0() - .gap_0p5() - .child( - Label::new("Visit the") + let description = match docs { + ApiKeyDocs::Custom { message } => h_flex().min_w_0().gap_0p5().child( + Label::new(message) .size(LabelSize::Small) .color(Color::Muted), - ) - .child( - ButtonLink::new(button_link_label, link) - .no_icon(true) - .label_size(LabelSize::Small) - .label_color(Color::Muted), - ) - .child( - Label::new("to generate an API key.") - .size(LabelSize::Small) - .color(Color::Muted), - ); + ), + ApiKeyDocs::Link { dashboard_url } 
=> h_flex() + .min_w_0() + .gap_0p5() + .child( + Label::new("Visit the") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + ButtonLink::new(button_link_label, dashboard_url) + .no_icon(true) + .label_size(LabelSize::Small) + .label_color(Color::Muted), + ) + .child( + Label::new("to generate an API key.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + }; let configured_card_label = if is_from_env_var { "API Key Set in Environment Variable" } else { @@ -484,34 +527,6 @@ fn ollama_settings() -> Box<[SettingsPageItem]> { ]) } -fn render_open_ai_compatible_provider( - settings_window: &SettingsWindow, - window: &mut Window, - cx: &mut Context, -) -> impl IntoElement { - let open_ai_compatible_settings = open_ai_compatible_settings(); - let additional_fields = settings_window - .render_sub_page_items_section( - open_ai_compatible_settings.iter().enumerate(), - true, - window, - cx, - ) - .into_any_element(); - - v_flex() - .id("open-ai-compatible") - .min_w_0() - .pt_8() - .gap_1p5() - .child( - SettingsSectionHeader::new("OpenAI Compatible API") - .icon(IconName::AiOpenAiCompat) - .no_padding(true), - ) - .child(div().px_neg_8().child(additional_fields)) -} - fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> { Box::new([ SettingsPageItem::SettingItem(SettingItem { diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 9ac338f7b849a53c402a0cea6b79ddc6496df0f2..9d7fe83736be8d1d9ed79d85708c5ed0574b7e3a 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -474,6 +474,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -1574,8 +1575,10 @@ impl SettingsWindow { }; this_weak - .update(cx, |this, 
cx| { - this.fetch_files(window, cx); + .update(cx, |_, cx| { + cx.defer_in(window, |settings_window, window, cx| { + settings_window.fetch_files(window, cx) + }); cx.observe_release_in(&project, window, |_, _, window, cx| { cx.defer_in(window, |this, window, cx| this.fetch_files(window, cx)); }) diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 6165a41c68894df9ad60110663562df713a24470..f0722a5791f6eecf873703bc5337890329d310c8 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -13,30 +13,38 @@ path = "src/sidebar.rs" [features] default = [] -test-support = [] [dependencies] acp_thread.workspace = true +agent.workspace = true +agent-client-protocol.workspace = true agent_ui.workspace = true chrono.workspace = true +editor.workspace = true fs.workspace = true -fuzzy.workspace = true gpui.workspace = true -picker.workspace = true +menu.workspace = true project.workspace = true recent_projects.workspace = true +settings.workspace = true theme.workspace = true ui.workspace = true -ui_input.workspace = true util.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] +acp_thread = { workspace = true, features = ["test-support"] } +agent = { workspace = true, features = ["test-support"] } +agent_ui = { workspace = true, features = ["test-support"] } +assistant_text_thread = { workspace = true, features = ["test-support"] } editor.workspace = true +language_model = { workspace = true, features = ["test-support"] } +recent_projects = { workspace = true, features = ["test-support"] } +serde_json.workspace = true feature_flags.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] } -recent_projects = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } -workspace = { workspace = true, features = 
["test-support"] } +workspace = { workspace = true, features = ["test-support"] } \ No newline at end of file diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 24974512cda12276b5fcdc51ebd71d091782dff6..8c68a332162d990503bf1e4881a69611f4b31c8c 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1,711 +1,189 @@ use acp_thread::ThreadStatus; -use agent_ui::{AgentPanel, AgentPanelEvent}; -use chrono::{Datelike, Local, NaiveDate, TimeDelta}; - -use fs::Fs; -use fuzzy::StringMatchCandidate; +use agent::ThreadStore; +use agent_client_protocol as acp; +use agent_ui::{AgentPanel, AgentPanelEvent, NewThread}; +use chrono::Utc; +use editor::{Editor, EditorElement, EditorStyle}; use gpui::{ - App, Context, Entity, EventEmitter, FocusHandle, Focusable, Pixels, Render, SharedString, - Subscription, Task, Window, px, + AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, ListState, + Pixels, Render, SharedString, Subscription, TextStyle, WeakEntity, Window, actions, list, + prelude::*, px, relative, rems, }; -use picker::{Picker, PickerDelegate}; +use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::Event as ProjectEvent; -use recent_projects::{RecentProjectEntry, get_recent_projects}; -use std::fmt::Display; - +use settings::Settings; use std::collections::{HashMap, HashSet}; - -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use theme::ActiveTheme; +use std::mem; +use theme::{ActiveTheme, ThemeSettings}; use ui::utils::TRAFFIC_LIGHT_PADDING; use ui::{ - AgentThreadStatus, Divider, DividerColor, KeyBinding, ListSubHeader, Tab, ThreadItem, Tooltip, - prelude::*, + AgentThreadStatus, HighlightedLabel, IconButtonShape, KeyBinding, ListItem, PopoverMenu, Tab, + ThreadItem, Tooltip, WithScrollbar, prelude::*, }; -use ui_input::ErasedEditor; -use util::ResultExt as _; +use util::path_list::PathList; use workspace::{ - FocusWorkspaceSidebar, 
MultiWorkspace, NewWorkspaceInWindow, Sidebar as WorkspaceSidebar, - SidebarEvent, ToggleWorkspaceSidebar, Workspace, + FocusWorkspaceSidebar, MultiWorkspace, Sidebar as WorkspaceSidebar, SidebarEvent, + ToggleWorkspaceSidebar, Workspace, }; +use zed_actions::editor::{MoveDown, MoveUp}; + +actions!( + agents_sidebar, + [ + /// Collapses the selected entry in the workspace sidebar. + CollapseSelectedEntry, + /// Expands the selected entry in the workspace sidebar. + ExpandSelectedEntry, + ] +); + +const DEFAULT_WIDTH: Pixels = px(320.0); +const MIN_WIDTH: Pixels = px(200.0); +const MAX_WIDTH: Pixels = px(800.0); +const DEFAULT_THREADS_SHOWN: usize = 5; #[derive(Clone, Debug)] -struct AgentThreadInfo { +struct ActiveThreadInfo { + session_id: acp::SessionId, title: SharedString, status: AgentThreadStatus, icon: IconName, + icon_from_external_svg: Option, + is_background: bool, } -const DEFAULT_WIDTH: Pixels = px(320.0); -const MIN_WIDTH: Pixels = px(200.0); -const MAX_WIDTH: Pixels = px(800.0); -const MAX_MATCHES: usize = 100; - -#[derive(Clone)] -struct WorkspaceThreadEntry { - index: usize, - worktree_label: SharedString, - full_path: SharedString, - thread_info: Option, -} - -impl WorkspaceThreadEntry { - fn new(index: usize, workspace: &Entity, cx: &App) -> Self { - let workspace_ref = workspace.read(cx); - - let worktrees: Vec<_> = workspace_ref - .worktrees(cx) - .filter(|worktree| worktree.read(cx).is_visible()) - .map(|worktree| worktree.read(cx).abs_path()) - .collect(); - - let worktree_names: Vec = worktrees - .iter() - .filter_map(|path| { - path.file_name() - .map(|name| name.to_string_lossy().to_string()) - }) - .collect(); - - let worktree_label: SharedString = if worktree_names.is_empty() { - format!("Workspace {}", index + 1).into() - } else { - worktree_names.join(", ").into() - }; - - let full_path: SharedString = worktrees - .iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>() - .join("\n") - .into(); - - let thread_info = 
Self::thread_info(workspace, cx); - +impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo { + fn from(info: &ActiveThreadInfo) -> Self { Self { - index, - worktree_label, - full_path, - thread_info, - } - } - - fn thread_info(workspace: &Entity, cx: &App) -> Option { - let agent_panel = workspace.read(cx).panel::(cx)?; - let agent_panel_ref = agent_panel.read(cx); - - let thread_view = agent_panel_ref.as_active_thread_view(cx)?.read(cx); - let thread = thread_view.thread.read(cx); - - let icon = thread_view.agent_icon; - let title = thread.title(); - - let status = if thread.is_waiting_for_confirmation() { - AgentThreadStatus::WaitingForConfirmation - } else if thread.had_error() { - AgentThreadStatus::Error - } else { - match thread.status() { - ThreadStatus::Generating => AgentThreadStatus::Running, - ThreadStatus::Idle => AgentThreadStatus::Completed, - } - }; - Some(AgentThreadInfo { - title, - status, - icon, - }) - } -} - -#[derive(Clone)] -enum SidebarEntry { - Separator(SharedString), - WorkspaceThread(WorkspaceThreadEntry), - RecentProject(RecentProjectEntry), -} - -impl SidebarEntry { - fn searchable_text(&self) -> &str { - match self { - SidebarEntry::Separator(_) => "", - SidebarEntry::WorkspaceThread(entry) => entry.worktree_label.as_ref(), - SidebarEntry::RecentProject(entry) => entry.name.as_ref(), + session_id: info.session_id.clone(), + cwd: None, + title: Some(info.title.clone()), + updated_at: Some(Utc::now()), + meta: None, } } } -#[derive(Clone)] -struct SidebarMatch { - entry: SidebarEntry, - positions: Vec, +#[derive(Clone, Debug)] +#[allow(dead_code)] +enum ListEntry { + ProjectHeader { + path_list: PathList, + label: SharedString, + highlight_positions: Vec, + }, + Thread { + session_info: acp_thread::AgentSessionInfo, + icon: IconName, + icon_from_external_svg: Option, + status: AgentThreadStatus, + diff_stats: Option<(usize, usize)>, + workspace_index: usize, + is_live: bool, + is_background: bool, + highlight_positions: Vec, + 
}, + ViewMore { + path_list: PathList, + remaining_count: usize, + }, + NewThread { + path_list: PathList, + }, } -struct WorkspacePickerDelegate { - multi_workspace: Entity, - entries: Vec, - active_workspace_index: usize, - workspace_thread_count: usize, - /// All recent projects including what's filtered out of entries - /// used to add unopened projects to entries on rebuild - recent_projects: Vec, - recent_project_thread_titles: HashMap, - matches: Vec, - selected_index: usize, - query: String, - hovered_thread_item: Option, - notified_workspaces: HashSet, +#[derive(Default)] +struct SidebarContents { + entries: Vec, + notified_threads: HashSet, } -impl WorkspacePickerDelegate { - fn new(multi_workspace: Entity) -> Self { - Self { - multi_workspace, - entries: Vec::new(), - active_workspace_index: 0, - workspace_thread_count: 0, - recent_projects: Vec::new(), - recent_project_thread_titles: HashMap::new(), - matches: Vec::new(), - selected_index: 0, - query: String::new(), - hovered_thread_item: None, - notified_workspaces: HashSet::new(), - } +impl SidebarContents { + fn is_thread_notified(&self, session_id: &acp::SessionId) -> bool { + self.notified_threads.contains(session_id) } +} - fn set_entries( - &mut self, - workspace_threads: Vec, - active_workspace_index: usize, - cx: &App, - ) { - if let Some(hovered_index) = self.hovered_thread_item { - let still_exists = workspace_threads - .iter() - .any(|thread| thread.index == hovered_index); - if !still_exists { - self.hovered_thread_item = None; - } - } - - let old_statuses: HashMap = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => thread - .thread_info - .as_ref() - .map(|info| (thread.index, info.status)), - _ => None, - }) - .collect(); +fn fuzzy_match_positions(query: &str, candidate: &str) -> Option> { + let mut positions = Vec::new(); + let mut query_chars = query.chars().peekable(); - for thread in &workspace_threads { - if let Some(info) = 
&thread.thread_info { - if info.status == AgentThreadStatus::Completed - && thread.index != active_workspace_index - { - if old_statuses.get(&thread.index) == Some(&AgentThreadStatus::Running) { - self.notified_workspaces.insert(thread.index); - } - } + for (byte_idx, candidate_char) in candidate.char_indices() { + if let Some(&query_char) = query_chars.peek() { + if candidate_char.eq_ignore_ascii_case(&query_char) { + positions.push(byte_idx); + query_chars.next(); } + } else { + break; } - - if self.active_workspace_index != active_workspace_index { - self.notified_workspaces.remove(&active_workspace_index); - } - self.active_workspace_index = active_workspace_index; - self.workspace_thread_count = workspace_threads.len(); - self.rebuild_entries(workspace_threads, cx); } - fn set_recent_projects(&mut self, recent_projects: Vec, cx: &App) { - self.recent_project_thread_titles.clear(); - - self.recent_projects = recent_projects; - - let workspace_threads: Vec = self - .entries - .iter() - .filter_map(|entry| match entry { - SidebarEntry::WorkspaceThread(thread) => Some(thread.clone()), - _ => None, - }) - .collect(); - self.rebuild_entries(workspace_threads, cx); - } - - fn open_workspace_path_sets(&self, cx: &App) -> Vec>> { - self.multi_workspace - .read(cx) - .workspaces() - .iter() - .map(|workspace| { - let mut paths = workspace.read(cx).root_paths(cx); - paths.sort(); - paths - }) - .collect() - } - - fn rebuild_entries(&mut self, workspace_threads: Vec, cx: &App) { - let open_path_sets = self.open_workspace_path_sets(cx); - - self.entries.clear(); - - if !workspace_threads.is_empty() { - self.entries - .push(SidebarEntry::Separator("Active Workspaces".into())); - for thread in workspace_threads { - self.entries.push(SidebarEntry::WorkspaceThread(thread)); - } - } - - let recent: Vec<_> = self - .recent_projects - .iter() - .filter(|project| { - let mut project_paths: Vec<&Path> = - project.paths.iter().map(|p| p.as_path()).collect(); - project_paths.sort(); 
- !open_path_sets.iter().any(|open_paths| { - open_paths.len() == project_paths.len() - && open_paths - .iter() - .zip(&project_paths) - .all(|(a, b)| a.as_ref() == *b) - }) - }) - .cloned() - .collect(); - - if !recent.is_empty() { - let today = Local::now().naive_local().date(); - let mut current_bucket: Option = None; - - for project in recent { - let entry_date = project.timestamp.with_timezone(&Local).naive_local().date(); - let bucket = TimeBucket::from_dates(today, entry_date); - - if current_bucket != Some(bucket) { - current_bucket = Some(bucket); - self.entries - .push(SidebarEntry::Separator(bucket.to_string().into())); - } - - self.entries.push(SidebarEntry::RecentProject(project)); - } - } + if query_chars.peek().is_none() { + Some(positions) + } else { + None } } -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -enum TimeBucket { - Today, - Yesterday, - ThisWeek, - PastWeek, - All, -} - -impl TimeBucket { - fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { - if date == reference { - return TimeBucket::Today; - } - - if date == reference - TimeDelta::days(1) { - return TimeBucket::Yesterday; - } - - let week = date.iso_week(); - - if reference.iso_week() == week { - return TimeBucket::ThisWeek; - } - - let last_week = (reference - TimeDelta::days(7)).iso_week(); - - if week == last_week { - return TimeBucket::PastWeek; +fn workspace_path_list_and_label( + workspace: &Entity, + cx: &App, +) -> (PathList, SharedString) { + let workspace_ref = workspace.read(cx); + let mut paths = Vec::new(); + let mut names = Vec::new(); + + for worktree in workspace_ref.worktrees(cx) { + let worktree_ref = worktree.read(cx); + if !worktree_ref.is_visible() { + continue; } - - TimeBucket::All - } -} - -impl Display for TimeBucket { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TimeBucket::Today => write!(f, "Today"), - TimeBucket::Yesterday => write!(f, "Yesterday"), - TimeBucket::ThisWeek => write!(f, "This Week"), - 
TimeBucket::PastWeek => write!(f, "Past Week"), - TimeBucket::All => write!(f, "All"), + let abs_path = worktree_ref.abs_path(); + paths.push(abs_path.to_path_buf()); + if let Some(name) = abs_path.file_name() { + names.push(name.to_string_lossy().to_string()); } } -} -fn open_recent_project(paths: Vec, window: &mut Window, cx: &mut App) { - let Some(handle) = window.window_handle().downcast::() else { - return; + let label: SharedString = if names.is_empty() { + // TODO: Can we do something better in this case? + "Empty Workspace".into() + } else { + names.join(", ").into() }; - cx.defer(move |cx| { - if let Some(task) = handle - .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(paths, window, cx) - }) - .log_err() - { - task.detach_and_log_err(cx); - } - }); + (PathList::new(&paths), label) } -impl PickerDelegate for WorkspacePickerDelegate { - type ListItem = AnyElement; - - fn match_count(&self) -> usize { - self.matches.len() - } - - fn selected_index(&self) -> usize { - self.selected_index - } - - fn set_selected_index( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) { - self.selected_index = ix; - } - - fn can_select( - &mut self, - ix: usize, - _window: &mut Window, - _cx: &mut Context>, - ) -> bool { - match self.matches.get(ix) { - Some(SidebarMatch { - entry: SidebarEntry::Separator(_), - .. 
- }) => false, - _ => true, - } - } - - fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - "Search…".into() - } - - fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { - if self.query.is_empty() { - None - } else { - Some("No threads match your search.".into()) - } - } - - fn update_matches( - &mut self, - query: String, - window: &mut Window, - cx: &mut Context>, - ) -> Task<()> { - let query_changed = self.query != query; - self.query = query.clone(); - if query_changed { - self.hovered_thread_item = None; - } - let entries = self.entries.clone(); - - if query.is_empty() { - self.matches = entries - .into_iter() - .map(|entry| SidebarMatch { - entry, - positions: Vec::new(), - }) - .collect(); - - let separator_offset = if self.workspace_thread_count > 0 { - 1 - } else { - 0 - }; - self.selected_index = (self.active_workspace_index + separator_offset) - .min(self.matches.len().saturating_sub(1)); - return Task::ready(()); - } - - let executor = cx.background_executor().clone(); - cx.spawn_in(window, async move |picker, cx| { - let matches = cx - .background_spawn(async move { - let data_entries: Vec<(usize, &SidebarEntry)> = entries - .iter() - .enumerate() - .filter(|(_, entry)| !matches!(entry, SidebarEntry::Separator(_))) - .collect(); - - let candidates: Vec = data_entries - .iter() - .enumerate() - .map(|(candidate_index, (_, entry))| { - StringMatchCandidate::new(candidate_index, entry.searchable_text()) - }) - .collect(); - - let search_matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - MAX_MATCHES, - &Default::default(), - executor, - ) - .await; - - let mut workspace_matches = Vec::new(); - let mut project_matches = Vec::new(); - - for search_match in search_matches { - let (original_index, _) = data_entries[search_match.candidate_id]; - let entry = entries[original_index].clone(); - let sidebar_match = SidebarMatch { - positions: search_match.positions, - entry: entry.clone(), - }; 
- match entry { - SidebarEntry::WorkspaceThread(_) => { - workspace_matches.push(sidebar_match) - } - SidebarEntry::RecentProject(_) => project_matches.push(sidebar_match), - SidebarEntry::Separator(_) => {} - } - } - - let mut result = Vec::new(); - if !workspace_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Active Workspaces".into()), - positions: Vec::new(), - }); - result.extend(workspace_matches); - } - if !project_matches.is_empty() { - result.push(SidebarMatch { - entry: SidebarEntry::Separator("Recent Projects".into()), - positions: Vec::new(), - }); - result.extend(project_matches); - } - result - }) - .await; - - picker - .update_in(cx, |picker, _window, _cx| { - picker.delegate.matches = matches; - if picker.delegate.matches.is_empty() { - picker.delegate.selected_index = 0; - } else { - let first_selectable = picker - .delegate - .matches - .iter() - .position(|m| !matches!(m.entry, SidebarEntry::Separator(_))) - .unwrap_or(0); - picker.delegate.selected_index = first_selectable; - } - }) - .log_err(); +fn workspace_index_for_path_list( + workspaces: &[Entity], + path_list: &PathList, + cx: &App, +) -> Option { + workspaces + .iter() + .enumerate() + .find_map(|(index, workspace)| { + let (candidate, _) = workspace_path_list_and_label(workspace, cx); + (candidate == *path_list).then_some(index) }) - } - - fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { - let Some(selected_match) = self.matches.get(self.selected_index) else { - return; - }; - - match &selected_match.entry { - SidebarEntry::Separator(_) => {} - SidebarEntry::WorkspaceThread(thread_entry) => { - let target_index = thread_entry.index; - self.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate_index(target_index, window, cx); - }); - } - SidebarEntry::RecentProject(project_entry) => { - let paths = project_entry.paths.clone(); - open_recent_project(paths, window, cx); - } - } - } - - fn 
dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} - - fn render_match( - &self, - index: usize, - selected: bool, - _window: &mut Window, - cx: &mut Context>, - ) -> Option { - let match_entry = self.matches.get(index)?; - let SidebarMatch { entry, positions } = match_entry; - - match entry { - SidebarEntry::Separator(title) => Some( - v_flex() - .when(index > 0, |this| { - this.mt_1() - .gap_2() - .child(Divider::horizontal().color(DividerColor::BorderFaded)) - }) - .child(ListSubHeader::new(title.clone()).inset(true)) - .into_any_element(), - ), - SidebarEntry::WorkspaceThread(thread_entry) => { - let worktree_label = thread_entry.worktree_label.clone(); - let full_path = thread_entry.full_path.clone(); - let thread_info = thread_entry.thread_info.clone(); - let workspace_index = thread_entry.index; - let multi_workspace = self.multi_workspace.clone(); - let workspace_count = self.multi_workspace.read(cx).workspaces().len(); - let is_hovered = self.hovered_thread_item == Some(workspace_index); - - let remove_btn = IconButton::new( - format!("remove-workspace-{}", workspace_index), - IconName::Close, - ) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("Remove Workspace")) - .on_click({ - let multi_workspace = multi_workspace; - move |_, window, cx| { - multi_workspace.update(cx, |mw, cx| { - mw.remove_workspace(workspace_index, window, cx); - }); - } - }); - - let has_notification = self.notified_workspaces.contains(&workspace_index); - let thread_subtitle = thread_info.as_ref().map(|info| info.title.clone()); - let status = thread_info - .as_ref() - .map_or(AgentThreadStatus::default(), |info| info.status); - let running = matches!( - status, - AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation - ); - - Some( - ThreadItem::new( - ("workspace-item", thread_entry.index), - thread_subtitle.unwrap_or("New Thread".into()), - ) - .icon( - thread_info - .as_ref() - .map_or(IconName::ZedAgent, |info| 
info.icon), - ) - .running(running) - .generation_done(has_notification) - .status(status) - .selected(selected) - .worktree(worktree_label.clone()) - .worktree_highlight_positions(positions.clone()) - .when(workspace_count > 1, |item| item.action_slot(remove_btn)) - .hovered(is_hovered) - .on_hover(cx.listener(move |picker, is_hovered, _window, cx| { - let mut changed = false; - if *is_hovered { - if picker.delegate.hovered_thread_item != Some(workspace_index) { - picker.delegate.hovered_thread_item = Some(workspace_index); - changed = true; - } - } else if picker.delegate.hovered_thread_item == Some(workspace_index) { - picker.delegate.hovered_thread_item = None; - changed = true; - } - if changed { - cx.notify(); - } - })) - .when(!full_path.is_empty(), |this| { - this.tooltip(move |_, cx| { - Tooltip::with_meta(worktree_label.clone(), None, full_path.clone(), cx) - }) - }) - .into_any_element(), - ) - } - SidebarEntry::RecentProject(project_entry) => { - let name = project_entry.name.clone(); - let full_path = project_entry.full_path.clone(); - let item_id: SharedString = - format!("recent-project-{:?}", project_entry.workspace_id).into(); - - Some( - ThreadItem::new(item_id, name.clone()) - .icon(IconName::Folder) - .selected(selected) - .highlight_positions(positions.clone()) - .tooltip(move |_, cx| { - Tooltip::with_meta(name.clone(), None, full_path.clone(), cx) - }) - .into_any_element(), - ) - } - } - } - - fn render_editor( - &self, - editor: &Arc, - window: &mut Window, - cx: &mut Context>, - ) -> Div { - h_flex() - .h(Tab::container_height(cx)) - .w_full() - .px_2() - .gap_2() - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(editor.render(window, cx)) - } } pub struct Sidebar { - multi_workspace: Entity, + multi_workspace: WeakEntity, width: Pixels, - picker: Entity>, - _subscription: Subscription, + 
focus_handle: FocusHandle, + filter_editor: Entity, + list_state: ListState, + contents: SidebarContents, + selection: Option, + collapsed_groups: HashSet, + expanded_groups: HashSet, + _subscriptions: Vec, _project_subscriptions: Vec, _agent_panel_subscriptions: Vec, - _thread_subscriptions: Vec, - #[cfg(any(test, feature = "test-support"))] - test_thread_infos: HashMap, - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap, - _fetch_recent_projects: Task<()>, + _thread_store_subscription: Option, } impl EventEmitter for Sidebar {} @@ -716,15 +194,17 @@ impl Sidebar { window: &mut Window, cx: &mut Context, ) -> Self { - let delegate = WorkspacePickerDelegate::new(multi_workspace.clone()); - let picker = cx.new(|cx| { - Picker::list(delegate, window, cx) - .max_height(None) - .show_scrollbar(true) - .modal(false) + let focus_handle = cx.focus_handle(); + cx.on_focus_in(&focus_handle, window, Self::focus_in) + .detach(); + + let filter_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search threads…", window, cx); + editor }); - let subscription = cx.observe_in( + let observe_subscription = cx.observe_in( &multi_workspace, window, |this, _multi_workspace, window, cx| { @@ -732,38 +212,46 @@ impl Sidebar { }, ); - let fetch_recent_projects = { - let picker = picker.downgrade(); - let fs = ::global(cx); - cx.spawn_in(window, async move |_this, cx| { - let projects = get_recent_projects(None, None, fs).await; - - cx.update(|window, cx| { - if let Some(picker) = picker.upgrade() { - picker.update(cx, |picker, cx| { - picker.delegate.set_recent_projects(projects, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); + let filter_subscription = cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::BufferEdited = event { + let query = this.filter_editor.read(cx).text(cx); + if !query.is_empty() { + 
this.selection.take(); + } + this.rebuild_contents(cx); + this.list_state.reset(this.contents.entries.len()); + if !query.is_empty() { + this.selection = this + .contents + .entries + .iter() + .position(|entry| matches!(entry, ListEntry::Thread { .. })) + .or_else(|| { + if this.contents.entries.is_empty() { + None + } else { + Some(0) + } }); - } - }) - .log_err(); - }) - }; + } + cx.notify(); + } + }); let mut this = Self { - multi_workspace, + multi_workspace: multi_workspace.downgrade(), width: DEFAULT_WIDTH, - picker, - _subscription: subscription, + focus_handle, + filter_editor, + list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), + contents: SidebarContents::default(), + selection: None, + collapsed_groups: HashSet::new(), + expanded_groups: HashSet::new(), + _subscriptions: vec![observe_subscription, filter_subscription], _project_subscriptions: Vec::new(), _agent_panel_subscriptions: Vec::new(), - _thread_subscriptions: Vec::new(), - #[cfg(any(test, feature = "test-support"))] - test_thread_infos: HashMap::new(), - #[cfg(any(test, feature = "test-support"))] - test_recent_project_thread_titles: HashMap::new(), - _fetch_recent_projects: fetch_recent_projects, + _thread_store_subscription: None, }; this.update_entries(window, cx); this @@ -774,8 +262,10 @@ impl Sidebar { window: &mut Window, cx: &mut Context, ) -> Vec { - let projects: Vec<_> = self - .multi_workspace + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return Vec::new(); + }; + let projects: Vec<_> = multi_workspace .read(cx) .workspaces() .iter() @@ -801,80 +291,15 @@ impl Sidebar { .collect() } - fn build_workspace_thread_entries( - &self, - multi_workspace: &MultiWorkspace, - cx: &App, - ) -> (Vec, usize) { - #[allow(unused_mut)] - let mut entries: Vec = multi_workspace - .workspaces() - .iter() - .enumerate() - .map(|(index, workspace)| WorkspaceThreadEntry::new(index, workspace, cx)) - .collect(); - - #[cfg(any(test, feature = "test-support"))] - for 
(index, info) in &self.test_thread_infos { - if let Some(entry) = entries.get_mut(*index) { - entry.thread_info = Some(info.clone()); - } - } - - (entries, multi_workspace.active_workspace_index()) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_projects( - &self, - projects: Vec, - cx: &mut Context, - ) { - self.picker.update(cx, |picker, _cx| { - picker.delegate.recent_projects = projects; - }); - } - - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_thread_info( - &mut self, - index: usize, - title: SharedString, - status: AgentThreadStatus, - ) { - self.test_thread_infos.insert( - index, - AgentThreadInfo { - title, - status, - icon: IconName::ZedAgent, - }, - ); - } - - #[cfg(any(test, feature = "test-support"))] - pub fn set_test_recent_project_thread_title( + fn subscribe_to_agent_panels( &mut self, - full_path: SharedString, - title: SharedString, - cx: &mut Context, - ) { - self.test_recent_project_thread_titles - .insert(full_path.clone(), title.clone()); - self.picker.update(cx, |picker, _cx| { - picker - .delegate - .recent_project_thread_titles - .insert(full_path, title); - }); - } - - fn subscribe_to_agent_panels( - &mut self, - window: &mut Window, + window: &mut Window, cx: &mut Context, ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return Vec::new(); + }; + let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().to_vec(); workspaces .iter() @@ -888,8 +313,6 @@ impl Sidebar { }, ) } else { - // Panel hasn't loaded yet — observe the workspace so we - // re-subscribe once the panel appears on its dock. 
cx.observe_in(workspace, window, |this, _, window, cx| { this.update_entries(window, cx); }) @@ -898,376 +321,2816 @@ impl Sidebar { .collect() } - fn subscribe_to_threads( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> Vec { - let workspaces: Vec<_> = self.multi_workspace.read(cx).workspaces().to_vec(); - - workspaces - .iter() - .filter_map(|workspace| { - let agent_panel = workspace.read(cx).panel::(cx)?; - let thread = agent_panel.read(cx).active_agent_thread(cx)?; - Some(cx.observe_in(&thread, window, |this, _, window, cx| { + fn subscribe_to_thread_store(&mut self, window: &mut Window, cx: &mut Context) { + if self._thread_store_subscription.is_some() { + return; + } + if let Some(thread_store) = ThreadStore::try_global(cx) { + self._thread_store_subscription = + Some(cx.observe_in(&thread_store, window, |this, _, window, cx| { this.update_entries(window, cx); - })) + })); + } + } + + fn all_thread_infos_for_workspace( + workspace: &Entity, + cx: &App, + ) -> Vec { + let Some(agent_panel) = workspace.read(cx).panel::(cx) else { + return Vec::new(); + }; + let agent_panel_ref = agent_panel.read(cx); + + agent_panel_ref + .parent_threads(cx) + .into_iter() + .map(|thread_view| { + let thread_view_ref = thread_view.read(cx); + let thread = thread_view_ref.thread.read(cx); + + let icon = thread_view_ref.agent_icon; + let icon_from_external_svg = thread_view_ref.agent_icon_from_external_svg.clone(); + let title = thread.title(); + let session_id = thread.session_id().clone(); + let is_background = agent_panel_ref.is_background_thread(&session_id); + + let status = if thread.is_waiting_for_confirmation() { + AgentThreadStatus::WaitingForConfirmation + } else if thread.had_error() { + AgentThreadStatus::Error + } else { + match thread.status() { + ThreadStatus::Generating => AgentThreadStatus::Running, + ThreadStatus::Idle => AgentThreadStatus::Completed, + } + }; + + ActiveThreadInfo { + session_id, + title, + status, + icon, + 
icon_from_external_svg, + is_background, + } }) .collect() } - /// Reconciles the sidebar's displayed entries with the current state of all - /// workspaces and their agent threads. + fn rebuild_contents(&mut self, cx: &App) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + let mw = multi_workspace.read(cx); + let workspaces = mw.workspaces().to_vec(); + let active_workspace = mw.workspaces().get(mw.active_workspace_index()).cloned(); + let active_workspace_index = active_workspace + .and_then(|active| { + workspaces + .iter() + .position(|w| w.entity_id() == active.entity_id()) + }) + .unwrap_or(0); + + let thread_store = ThreadStore::try_global(cx); + let query = self.filter_editor.read(cx).text(cx); + + let previous = mem::take(&mut self.contents); + + let old_statuses: HashMap = previous + .entries + .iter() + .filter_map(|entry| match entry { + ListEntry::Thread { + session_info, + status, + is_live: true, + .. + } => Some((session_info.session_id.clone(), *status)), + _ => None, + }) + .collect(); + + let mut entries = Vec::new(); + let mut notified_threads = previous.notified_threads; + + for (index, workspace) in workspaces.iter().enumerate() { + let (path_list, label) = workspace_path_list_and_label(workspace, cx); + + let is_collapsed = self.collapsed_groups.contains(&path_list); + let should_load_threads = !is_collapsed || !query.is_empty(); + + let mut threads: Vec = Vec::new(); + + if should_load_threads { + if let Some(ref thread_store) = thread_store { + for meta in thread_store.read(cx).threads_for_paths(&path_list) { + threads.push(ListEntry::Thread { + session_info: meta.into(), + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::default(), + diff_stats: None, + workspace_index: index, + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }); + } + } + + let live_infos = Self::all_thread_infos_for_workspace(workspace, cx); + + for info in &live_infos 
{ + let Some(existing) = threads.iter_mut().find(|t| { + matches!(t, ListEntry::Thread { session_info, .. } if session_info.session_id == info.session_id) + }) else { + continue; + }; + + if let ListEntry::Thread { + session_info, + status, + icon, + icon_from_external_svg, + workspace_index: _, + is_live, + is_background, + .. + } = existing + { + session_info.title = Some(info.title.clone()); + *status = info.status; + *icon = info.icon; + *icon_from_external_svg = info.icon_from_external_svg.clone(); + *is_live = true; + *is_background = info.is_background; + } + } + + // Update notification state for live threads. + for thread in &threads { + if let ListEntry::Thread { + workspace_index, + session_info, + status, + is_background, + .. + } = thread + { + let session_id = &session_info.session_id; + if *is_background && *status == AgentThreadStatus::Completed { + notified_threads.insert(session_id.clone()); + } else if *status == AgentThreadStatus::Completed + && *workspace_index != active_workspace_index + && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running) + { + notified_threads.insert(session_id.clone()); + } + + if *workspace_index == active_workspace_index && !*is_background { + notified_threads.remove(session_id); + } + } + } + + threads.sort_by(|a, b| { + let a_time = match a { + ListEntry::Thread { session_info, .. } => session_info.updated_at, + _ => unreachable!(), + }; + let b_time = match b { + ListEntry::Thread { session_info, .. } => session_info.updated_at, + _ => unreachable!(), + }; + b_time.cmp(&a_time) + }); + } + + if !query.is_empty() { + let mut matched_threads = Vec::new(); + for mut thread in threads { + if let ListEntry::Thread { + session_info, + highlight_positions, + .. 
+ } = &mut thread + { + let title = session_info + .title + .as_ref() + .map(|s| s.as_ref()) + .unwrap_or(""); + if let Some(positions) = fuzzy_match_positions(&query, title) { + *highlight_positions = positions; + matched_threads.push(thread); + } + } + } + + let workspace_highlight_positions = + fuzzy_match_positions(&query, &label).unwrap_or_default(); + + if matched_threads.is_empty() && workspace_highlight_positions.is_empty() { + continue; + } + + entries.push(ListEntry::ProjectHeader { + path_list: path_list.clone(), + label, + highlight_positions: workspace_highlight_positions, + }); + entries.extend(matched_threads); + } else { + entries.push(ListEntry::ProjectHeader { + path_list: path_list.clone(), + label, + highlight_positions: Vec::new(), + }); + + if is_collapsed { + continue; + } + + let total = threads.len(); + let show_view_more = + total > DEFAULT_THREADS_SHOWN && !self.expanded_groups.contains(&path_list); + + let count = if show_view_more { + DEFAULT_THREADS_SHOWN + } else { + total + }; + + entries.extend(threads.into_iter().take(count)); + + if show_view_more { + entries.push(ListEntry::ViewMore { + path_list: path_list.clone(), + remaining_count: total - DEFAULT_THREADS_SHOWN, + }); + } + + if total == 0 { + entries.push(ListEntry::NewThread { + path_list: path_list.clone(), + }); + } + } + } + + // Prune stale entries from notified_threads. + let current_session_ids: HashSet<&acp::SessionId> = entries + .iter() + .filter_map(|e| match e { + ListEntry::Thread { session_info, .. 
} => Some(&session_info.session_id), + _ => None, + }) + .collect(); + notified_threads.retain(|id| current_session_ids.contains(id)); + + self.contents = SidebarContents { + entries, + notified_threads, + }; + } + fn update_entries(&mut self, window: &mut Window, cx: &mut Context) { let multi_workspace = self.multi_workspace.clone(); cx.defer_in(window, move |this, window, cx| { - if !this.multi_workspace.read(cx).multi_workspace_enabled(cx) { + let Some(multi_workspace) = multi_workspace.upgrade() else { + return; + }; + if !multi_workspace.read(cx).multi_workspace_enabled(cx) { return; } this._project_subscriptions = this.subscribe_to_projects(window, cx); this._agent_panel_subscriptions = this.subscribe_to_agent_panels(window, cx); - this._thread_subscriptions = this.subscribe_to_threads(window, cx); - let (entries, active_index) = multi_workspace.read_with(cx, |multi_workspace, cx| { - this.build_workspace_thread_entries(multi_workspace, cx) - }); + this.subscribe_to_thread_store(window, cx); - let had_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - this.picker.update(cx, |picker, cx| { - picker.delegate.set_entries(entries, active_index, cx); - let query = picker.query(cx); - picker.update_matches(query, window, cx); - }); - let has_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); - if had_notifications != has_notifications { - multi_workspace.update(cx, |_, cx| cx.notify()); + let had_notifications = this.has_notifications(cx); + + this.rebuild_contents(cx); + + this.list_state.reset(this.contents.entries.len()); + + if let Some(selection) = this.selection { + if selection >= this.contents.entries.len() { + this.selection = this.contents.entries.len().checked_sub(1); + } } - }); - } -} -impl WorkspaceSidebar for Sidebar { - fn width(&self, _cx: &App) -> Pixels { - self.width - } + if had_notifications != this.has_notifications(cx) { + multi_workspace.update(cx, |_, cx| { + cx.notify(); + }); + } 
- fn set_width(&mut self, width: Option, cx: &mut Context) { - self.width = width.unwrap_or(DEFAULT_WIDTH).clamp(MIN_WIDTH, MAX_WIDTH); - cx.notify(); + cx.notify(); + }); } - fn has_notifications(&self, cx: &App) -> bool { - !self.picker.read(cx).delegate.notified_workspaces.is_empty() - } -} + fn render_list_entry( + &mut self, + ix: usize, + window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let Some(entry) = self.contents.entries.get(ix) else { + return div().into_any_element(); + }; + let is_focused = self.focus_handle.is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window); + let is_selected = is_focused && self.selection == Some(ix); + + let is_group_header_after_first = + ix > 0 && matches!(entry, ListEntry::ProjectHeader { .. }); + + let rendered = match entry { + ListEntry::ProjectHeader { + path_list, + label, + highlight_positions, + } => self.render_project_header( + ix, + path_list, + label, + highlight_positions, + is_selected, + cx, + ), + ListEntry::Thread { + session_info, + icon, + icon_from_external_svg, + status, + workspace_index, + highlight_positions, + .. 
+ } => self.render_thread( + ix, + session_info, + *icon, + icon_from_external_svg.clone(), + *status, + *workspace_index, + highlight_positions, + is_selected, + cx, + ), + ListEntry::ViewMore { + path_list, + remaining_count, + } => self.render_view_more(ix, path_list, *remaining_count, is_selected, cx), + ListEntry::NewThread { path_list } => { + self.render_new_thread(ix, path_list, is_selected, cx) + } + }; -impl Focusable for Sidebar { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.read(cx).focus_handle(cx) + if is_group_header_after_first { + v_flex() + .w_full() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child(rendered) + .into_any_element() + } else { + rendered + } } -} -impl Render for Sidebar { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let titlebar_height = ui::utils::platform_title_bar_height(window); - let ui_font = theme::setup_ui_font(window, cx); - let is_focused = self.focus_handle(cx).is_focused(window); - - let focus_tooltip_label = if is_focused { - "Focus Workspace" + fn render_project_header( + &self, + ix: usize, + path_list: &PathList, + label: &SharedString, + highlight_positions: &[usize], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let id = SharedString::from(format!("project-header-{}", ix)); + let ib_id = SharedString::from(format!("project-header-new-thread-{}", ix)); + let group = SharedString::from(format!("group-{}", ix)); + + let is_collapsed = self.collapsed_groups.contains(path_list); + let disclosure_icon = if is_collapsed { + IconName::ChevronRight } else { - "Focus Sidebar" + IconName::ChevronDown }; + let path_list_for_new_thread = path_list.clone(); + let path_list_for_remove = path_list.clone(); + let path_list_for_toggle = path_list.clone(); + let workspace_count = self + .multi_workspace + .upgrade() + .map_or(0, |mw| mw.read(cx).workspaces().len()); - v_flex() - .id("workspace-sidebar") - 
.key_context("WorkspaceSidebar") - .font(ui_font) - .h_full() - .w(self.width) - .bg(cx.theme().colors().surface_background) - .border_r_1() - .border_color(cx.theme().colors().border) + ListItem::new(id) + .group_name(&group) + .toggle_state(is_selected) .child( h_flex() - .flex_none() - .h(titlebar_height) - .w_full() - .mt_px() - .pb_px() - .pr_1() - .when_else( - cfg!(target_os = "macos") && !window.is_fullscreen(), - |this| this.pl(px(TRAFFIC_LIGHT_PADDING)), - |this| this.pl_2(), - ) - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child({ - let focus_handle = cx.focus_handle(); - IconButton::new("close-sidebar", IconName::WorkspaceNavOpen) - .icon_size(IconSize::Small) - .tooltip(Tooltip::element(move |_, cx| { - v_flex() - .gap_1() - .child( - h_flex() - .gap_2() - .justify_between() - .child(Label::new("Close Sidebar")) - .child(KeyBinding::for_action_in( - &ToggleWorkspaceSidebar, - &focus_handle, - cx, - )), - ) - .child( - h_flex() - .pt_1() - .gap_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .justify_between() - .child(Label::new(focus_tooltip_label)) - .child(KeyBinding::for_action_in( - &FocusWorkspaceSidebar, - &focus_handle, - cx, - )), - ) - .into_any_element() - })) - .on_click(cx.listener(|_this, _, _window, cx| { - cx.emit(SidebarEvent::Close); - })) + .px_1() + .py_1p5() + .gap_0p5() + .child(if highlight_positions.is_empty() { + Label::new(label.clone()) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() + } else { + HighlightedLabel::new(label.clone(), highlight_positions.to_vec()) + .size(LabelSize::Small) + .color(Color::Muted) + .into_any_element() }) .child( - IconButton::new("new-workspace", IconName::Plus) + div().visible_on_hover(group).child( + Icon::new(disclosure_icon) + .size(IconSize::Small) + .color(Color::Muted), + ), + ), + ) + .end_hover_slot( + h_flex() + .gap_0p5() + .child( + IconButton::new(ib_id, IconName::NewThread) 
.icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("New Workspace", &NewWorkspaceInWindow, cx) - }) - .on_click(cx.listener(|this, _, window, cx| { - this.multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.create_workspace(window, cx); - }); + .icon_color(Color::Muted) + .tooltip(Tooltip::text("New Thread")) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.create_new_thread(&path_list_for_new_thread, window, cx); })), - ), + ) + .when(workspace_count > 1, |this| { + this.child( + IconButton::new( + SharedString::from(format!("project-header-remove-{}", ix)), + IconName::Close, + ) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .tooltip(Tooltip::text("Remove Project")) + .on_click(cx.listener( + move |this, _, window, cx| { + this.remove_workspace(&path_list_for_remove, window, cx); + }, + )), + ) + }), ) - .child(self.picker.clone()) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.toggle_collapse(&path_list_for_toggle, window, cx); + })) + .into_any_element() } -} -#[cfg(test)] -mod tests { - use super::*; - use feature_flags::FeatureFlagAppExt as _; - use fs::FakeFs; - use gpui::TestAppContext; - use settings::SettingsStore; + fn remove_workspace( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + + let Some(workspace_index) = workspace_index_for_path_list(&workspaces, path_list, cx) + else { + return; + }; + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.remove_workspace(workspace_index, window, cx); + }); + } + + fn toggle_collapse( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); + } else { + 
self.collapsed_groups.insert(path_list.clone()); + } + self.update_entries(window, cx); + } + + fn focus_in(&mut self, _window: &mut Window, cx: &mut Context) { + if self.selection.is_none() && !self.contents.entries.is_empty() { + self.selection = Some(0); + cx.notify(); + } + } + + fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context) { + if self.reset_filter_editor_text(window, cx) { + self.update_entries(window, cx); + } else { + self.focus_handle.focus(window, cx); + } + } + + fn reset_filter_editor_text(&mut self, window: &mut Window, cx: &mut Context) -> bool { + self.filter_editor.update(cx, |editor, cx| { + if editor.buffer().read(cx).len(cx).0 > 0 { + editor.set_text("", window, cx); + true + } else { + false + } + }) + } + + fn filter_query(&self, cx: &App) -> String { + self.filter_editor.read(cx).text(cx) + } + + fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { + self.select_next(&SelectNext, window, cx); + } + + fn editor_move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { + self.select_previous(&SelectPrevious, window, cx); + } + + fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context) { + let next = match self.selection { + Some(ix) if ix + 1 < self.contents.entries.len() => ix + 1, + None if !self.contents.entries.is_empty() => 0, + _ => return, + }; + self.selection = Some(next); + self.list_state.scroll_to_reveal_item(next); + cx.notify(); + } + + fn select_previous( + &mut self, + _: &SelectPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + let prev = match self.selection { + Some(ix) if ix > 0 => ix - 1, + None if !self.contents.entries.is_empty() => self.contents.entries.len() - 1, + _ => return, + }; + self.selection = Some(prev); + self.list_state.scroll_to_reveal_item(prev); + cx.notify(); + } + + fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { + if !self.contents.entries.is_empty() { + 
self.selection = Some(0); + self.list_state.scroll_to_reveal_item(0); + cx.notify(); + } + } + + fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + if let Some(last) = self.contents.entries.len().checked_sub(1) { + self.selection = Some(last); + self.list_state.scroll_to_reveal_item(last); + cx.notify(); + } + } + + fn confirm(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context) { + let Some(ix) = self.selection else { return }; + let Some(entry) = self.contents.entries.get(ix) else { + return; + }; + + match entry { + ListEntry::ProjectHeader { path_list, .. } => { + let path_list = path_list.clone(); + self.toggle_collapse(&path_list, window, cx); + } + ListEntry::Thread { + session_info, + workspace_index, + .. + } => { + let session_info = session_info.clone(); + let workspace_index = *workspace_index; + self.activate_thread(session_info, workspace_index, window, cx); + } + ListEntry::ViewMore { path_list, .. } => { + let path_list = path_list.clone(); + self.expanded_groups.insert(path_list); + self.update_entries(window, cx); + } + ListEntry::NewThread { path_list } => { + let path_list = path_list.clone(); + self.create_new_thread(&path_list, window, cx); + } + } + } + + fn activate_thread( + &mut self, + session_info: acp_thread::AgentSessionInfo, + workspace_index: usize, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate_index(workspace_index, window, cx); + }); + let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + if let Some(workspace) = workspaces.get(workspace_index) { + if let Some(agent_panel) = workspace.read(cx).panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.load_agent_thread(session_info, window, cx); + }); + } + } + } + + fn expand_selected_entry( + &mut self, + _: &ExpandSelectedEntry, + window: &mut Window, + 
cx: &mut Context, + ) { + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { path_list, .. }) => { + if self.collapsed_groups.contains(path_list) { + let path_list = path_list.clone(); + self.collapsed_groups.remove(&path_list); + self.update_entries(window, cx); + } else if ix + 1 < self.contents.entries.len() { + self.selection = Some(ix + 1); + self.list_state.scroll_to_reveal_item(ix + 1); + cx.notify(); + } + } + _ => {} + } + } + + fn collapse_selected_entry( + &mut self, + _: &CollapseSelectedEntry, + window: &mut Window, + cx: &mut Context, + ) { + let Some(ix) = self.selection else { return }; + + match self.contents.entries.get(ix) { + Some(ListEntry::ProjectHeader { path_list, .. }) => { + if !self.collapsed_groups.contains(path_list) { + let path_list = path_list.clone(); + self.collapsed_groups.insert(path_list); + self.update_entries(window, cx); + } + } + Some( + ListEntry::Thread { .. } | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. }, + ) => { + for i in (0..ix).rev() { + if let Some(ListEntry::ProjectHeader { path_list, .. 
}) = + self.contents.entries.get(i) + { + let path_list = path_list.clone(); + self.selection = Some(i); + self.collapsed_groups.insert(path_list); + self.update_entries(window, cx); + break; + } + } + } + None => {} + } + } + + fn render_thread( + &self, + ix: usize, + session_info: &acp_thread::AgentSessionInfo, + icon: IconName, + icon_from_external_svg: Option, + status: AgentThreadStatus, + workspace_index: usize, + highlight_positions: &[usize], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let has_notification = self.contents.is_thread_notified(&session_info.session_id); + + let title: SharedString = session_info + .title + .clone() + .unwrap_or_else(|| "Untitled".into()); + let session_info = session_info.clone(); + + let id = SharedString::from(format!("thread-entry-{}", ix)); + ThreadItem::new(id, title) + .icon(icon) + .when_some(icon_from_external_svg, |this, svg| { + this.custom_icon_from_external_svg(svg) + }) + .highlight_positions(highlight_positions.to_vec()) + .status(status) + .notified(has_notification) + .selected(is_selected) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.activate_thread(session_info.clone(), workspace_index, window, cx); + })) + .into_any_element() + } + + fn render_filter_input(&self, cx: &mut Context) -> impl IntoElement { + let settings = ThemeSettings::get_global(cx); + let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.ui_font.family.clone(), + font_features: settings.ui_font.features.clone(), + font_fallbacks: settings.ui_font.fallbacks.clone(), + font_size: rems(0.875).into(), + font_weight: settings.ui_font.weight, + font_style: FontStyle::Normal, + line_height: relative(1.3), + ..Default::default() + }; + + EditorElement::new( + &self.filter_editor, + EditorStyle { + local_player: cx.theme().players().local(), + text: text_style, + ..Default::default() + }, + ) + } + + fn render_view_more( + &self, + ix: usize, + path_list: 
&PathList, + remaining_count: usize, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let path_list = path_list.clone(); + let id = SharedString::from(format!("view-more-{}", ix)); + + let count = format!("({})", remaining_count); + + ListItem::new(id) + .toggle_state(is_selected) + .child( + h_flex() + .px_1() + .py_1p5() + .gap_1p5() + .child( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new("View More")) + .child(Label::new(count).color(Color::Muted).size(LabelSize::Small)), + ) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.expanded_groups.insert(path_list.clone()); + this.update_entries(window, cx); + })) + .into_any_element() + } + + fn create_new_thread( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + + let workspace_index = workspace_index_for_path_list(&workspaces, path_list, cx); + + let Some(workspace_index) = workspace_index else { + return; + }; + + multi_workspace.update(cx, |multi_workspace, cx| { + multi_workspace.activate_index(workspace_index, window, cx); + }); + + if let Some(workspace) = workspaces.get(workspace_index) { + workspace.update(cx, |workspace, cx| { + if let Some(agent_panel) = workspace.panel::(cx) { + agent_panel.update(cx, |panel, cx| { + panel.new_thread(&NewThread, window, cx); + }); + } + workspace.focus_panel::(window, cx); + }); + } + } + + fn render_new_thread( + &self, + ix: usize, + path_list: &PathList, + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let path_list = path_list.clone(); + + div() + .w_full() + .p_2() + .child( + Button::new( + SharedString::from(format!("new-thread-btn-{}", ix)), + "New Thread", + ) + .full_width() + .style(ButtonStyle::Outlined) + .icon(IconName::Plus) + .icon_color(Color::Muted) + 
.icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .toggle_state(is_selected) + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + this.create_new_thread(&path_list, window, cx); + })), + ) + .into_any_element() + } +} + +impl WorkspaceSidebar for Sidebar { + fn width(&self, _cx: &App) -> Pixels { + self.width + } + + fn set_width(&mut self, width: Option, cx: &mut Context) { + self.width = width.unwrap_or(DEFAULT_WIDTH).clamp(MIN_WIDTH, MAX_WIDTH); + cx.notify(); + } + + fn has_notifications(&self, _cx: &App) -> bool { + !self.contents.notified_threads.is_empty() + } +} + +impl Focusable for Sidebar { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.filter_editor.focus_handle(cx) + } +} + +impl Render for Sidebar { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let titlebar_height = ui::utils::platform_title_bar_height(window); + let ui_font = theme::setup_ui_font(window, cx); + let is_focused = self.focus_handle.is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window); + let has_query = !self.filter_query(cx).is_empty(); + + let focus_tooltip_label = if is_focused { + "Focus Workspace" + } else { + "Focus Sidebar" + }; + + v_flex() + .id("workspace-sidebar") + .key_context("WorkspaceSidebar") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::editor_move_down)) + .on_action(cx.listener(Self::editor_move_up)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::expand_selected_entry)) + .on_action(cx.listener(Self::collapse_selected_entry)) + .on_action(cx.listener(Self::cancel)) + .font(ui_font) + .h_full() + .w(self.width) + .bg(cx.theme().colors().surface_background) + .border_r_1() + .border_color(cx.theme().colors().border) + 
.child( + h_flex() + .flex_none() + .h(titlebar_height) + .w_full() + .mt_px() + .pb_px() + .pr_1() + .when_else( + cfg!(target_os = "macos") && !window.is_fullscreen(), + |this| this.pl(px(TRAFFIC_LIGHT_PADDING)), + |this| this.pl_2(), + ) + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border) + .child({ + let focus_handle_toggle = self.focus_handle.clone(); + let focus_handle_focus = self.focus_handle.clone(); + IconButton::new("close-sidebar", IconName::WorkspaceNavOpen) + .icon_size(IconSize::Small) + .tooltip(Tooltip::element(move |_, cx| { + v_flex() + .gap_1() + .child( + h_flex() + .gap_2() + .justify_between() + .child(Label::new("Close Sidebar")) + .child(KeyBinding::for_action_in( + &ToggleWorkspaceSidebar, + &focus_handle_toggle, + cx, + )), + ) + .child( + h_flex() + .pt_1() + .gap_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .justify_between() + .child(Label::new(focus_tooltip_label)) + .child(KeyBinding::for_action_in( + &FocusWorkspaceSidebar, + &focus_handle_focus, + cx, + )), + ) + .into_any_element() + })) + .on_click(cx.listener(|_this, _, _window, cx| { + cx.emit(SidebarEvent::Close); + })) + }) + .child({ + let workspace = self + .multi_workspace + .upgrade() + .map(|mw| mw.read(cx).workspace().downgrade()); + let focus_handle = workspace + .as_ref() + .and_then(|w| w.upgrade()) + .map(|w| w.read(cx).focus_handle(cx)) + .unwrap_or_else(|| cx.focus_handle()); + + PopoverMenu::new("sidebar-recent-projects-menu") + .menu(move |window, cx| { + let workspace = workspace.clone()?; + Some(recent_projects::RecentProjects::popover( + workspace, + false, + focus_handle.clone(), + window, + cx, + )) + }) + .trigger_with_tooltip( + IconButton::new("new-workspace", IconName::OpenFolder) + .icon_size(IconSize::Small), + |_window, cx| { + Tooltip::for_action( + "Open Recent Project", + &zed_actions::OpenRecent { + create_new_window: false, + }, + cx, + ) + }, + ) + .anchor(gpui::Corner::TopLeft) + }), + 
) + .child( + h_flex() + .flex_none() + .p_2() + .h(Tab::container_height(cx)) + .gap_1p5() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(self.render_filter_input(cx)) + .when(has_query, |this| { + this.pr_1().child( + IconButton::new("clear_filter", IconName::Close) + .shape(IconButtonShape::Square) + .tooltip(Tooltip::text("Clear Search")) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_filter_editor_text(window, cx); + this.update_entries(window, cx); + })), + ) + }), + ) + .child( + v_flex() + .flex_1() + .overflow_hidden() + .child( + list( + self.list_state.clone(), + cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), + ) + .vertical_scrollbar_for(&self.list_state, window, cx), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use acp_thread::StubAgentConnection; + use agent::ThreadStore; + use agent_ui::test_support::{active_session_id, open_thread_with_connection, send_message}; + use assistant_text_thread::TextThreadStore; + use chrono::DateTime; + use feature_flags::FeatureFlagAppExt as _; + use fs::FakeFs; + use gpui::TestAppContext; + use settings::SettingsStore; + use std::sync::Arc; + use util::path_list::PathList; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + editor::init(cx); + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + }); + } + + fn make_test_thread(title: &str, updated_at: DateTime) -> agent::DbThread { + agent::DbThread { + title: title.to_string().into(), + messages: Vec::new(), + updated_at, + detailed_summary: None, + initial_project_snapshot: None, + cumulative_token_usage: Default::default(), + request_token_usage: Default::default(), + model: None, + profile: None, + imported: false, + 
subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + draft_prompt: None, + ui_scroll_position: None, + } + } + + async fn init_test_project( + worktree_path: &str, + cx: &mut TestAppContext, + ) -> Entity { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await + } + + fn setup_sidebar( + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> Entity { + let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { + let mw_handle = cx.entity(); + cx.new(|cx| Sidebar::new(mw_handle, window, cx)) + }); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.register_sidebar(sidebar.clone(), window, cx); + }); + cx.run_until_parked(); + sidebar + } + + async fn save_n_test_threads( + count: u32, + path_list: &PathList, + cx: &mut gpui::VisualTestContext, + ) { + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + for i in 0..count { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + make_test_thread( + &format!("Thread {}", i + 1), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + } + + async fn save_thread_to_store( + session_id: &acp::SessionId, + path_list: &PathList, + cx: &mut gpui::VisualTestContext, + ) { + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + session_id.clone(), + make_test_thread( + "Test", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + } + + fn 
open_and_focus_sidebar( + sidebar: &Entity, + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, + ) { + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); + } + + fn visible_entries_as_strings( + sidebar: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> Vec { + sidebar.read_with(cx, |sidebar, _cx| { + sidebar + .contents + .entries + .iter() + .enumerate() + .map(|(ix, entry)| { + let selected = if sidebar.selection == Some(ix) { + " <== selected" + } else { + "" + }; + match entry { + ListEntry::ProjectHeader { + label, + path_list, + highlight_positions: _, + .. + } => { + let icon = if sidebar.collapsed_groups.contains(path_list) { + ">" + } else { + "v" + }; + format!("{} [{}]{}", icon, label, selected) + } + ListEntry::Thread { + session_info, + status, + is_live, + .. + } => { + let title = session_info + .title + .as_ref() + .map(|s| s.as_ref()) + .unwrap_or("Untitled"); + let active = if *is_live { " *" } else { "" }; + let status_str = match status { + AgentThreadStatus::Running => " (running)", + AgentThreadStatus::Error => " (error)", + AgentThreadStatus::WaitingForConfirmation => " (waiting)", + _ => "", + }; + let notified = if sidebar + .contents + .is_thread_notified(&session_info.session_id) + { + " (!)" + } else { + "" + }; + format!( + " {}{}{}{}{}", + title, active, status_str, notified, selected + ) + } + ListEntry::ViewMore { + remaining_count, .. + } => { + format!(" + View More ({}){}", remaining_count, selected) + } + ListEntry::NewThread { .. 
} => { + format!(" [+ New Thread]{}", selected) + } + } + }) + .collect() + }) + } + + #[gpui::test] + async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " [+ New Thread]"] + ); + } + + #[gpui::test] + async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Fix crash in project panel", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-2")), + make_test_thread( + "Add inline diff view", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + ] + ); + } + + #[gpui::test] + async fn test_workspace_lifecycle(cx: &mut TestAppContext) { + let project = 
init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Single workspace with a thread + let path_list = PathList::new(&[std::path::PathBuf::from("/project-a")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-a1")), + make_test_thread( + "Thread A1", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] + ); + + // Add a second workspace + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Thread A1", + "v [Empty Workspace]", + " [+ New Thread]" + ] + ); + + // Remove the second workspace + multi_workspace.update_in(cx, |mw, window, cx| { + mw.remove_workspace(1, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a]", " Thread A1"] + ); + } + + #[gpui::test] + async fn test_view_more_pagination(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(12, &path_list, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + 
cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Thread 12", + " Thread 11", + " Thread 10", + " Thread 9", + " Thread 8", + " + View More (7)", + ] + ); + } + + #[gpui::test] + async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Collapse + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]"] + ); + + // Expand + sidebar.update_in(cx, |s, window, cx| { + s.toggle_collapse(&path_list, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + } + + #[gpui::test] + async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let expanded_path = PathList::new(&[std::path::PathBuf::from("/expanded")]); + let collapsed_path = PathList::new(&[std::path::PathBuf::from("/collapsed")]); + + sidebar.update_in(cx, |s, _window, _cx| { + s.collapsed_groups.insert(collapsed_path.clone()); + s.contents + .notified_threads + .insert(acp::SessionId::new(Arc::from("t-5"))); + s.contents.entries = 
vec![ + // Expanded project header + ListEntry::ProjectHeader { + path_list: expanded_path.clone(), + label: "expanded-project".into(), + highlight_positions: Vec::new(), + }, + // Thread with default (Completed) status, not active + ListEntry::Thread { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-1")), + cwd: None, + title: Some("Completed thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + diff_stats: None, + workspace_index: 0, + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }, + // Active thread with Running status + ListEntry::Thread { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-2")), + cwd: None, + title: Some("Running thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Running, + diff_stats: None, + workspace_index: 0, + is_live: true, + is_background: false, + highlight_positions: Vec::new(), + }, + // Active thread with Error status + ListEntry::Thread { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-3")), + cwd: None, + title: Some("Error thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Error, + diff_stats: None, + workspace_index: 1, + is_live: true, + is_background: false, + highlight_positions: Vec::new(), + }, + // Thread with WaitingForConfirmation status, not active + ListEntry::Thread { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-4")), + cwd: None, + title: Some("Waiting thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + 
status: AgentThreadStatus::WaitingForConfirmation, + diff_stats: None, + workspace_index: 0, + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + }, + // Background thread that completed (should show notification) + ListEntry::Thread { + session_info: acp_thread::AgentSessionInfo { + session_id: acp::SessionId::new(Arc::from("t-5")), + cwd: None, + title: Some("Notified thread".into()), + updated_at: Some(Utc::now()), + meta: None, + }, + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::Completed, + diff_stats: None, + workspace_index: 1, + is_live: true, + is_background: true, + highlight_positions: Vec::new(), + }, + // View More entry + ListEntry::ViewMore { + path_list: expanded_path.clone(), + remaining_count: 42, + }, + // Collapsed project header + ListEntry::ProjectHeader { + path_list: collapsed_path.clone(), + label: "collapsed-project".into(), + highlight_positions: Vec::new(), + }, + ]; + // Select the Running thread (index 2) + s.selection = Some(2); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [expanded-project]", + " Completed thread", + " Running thread * (running) <== selected", + " Error thread * (error)", + " Waiting thread (waiting)", + " Notified thread * (!)", + " + View More (42)", + "> [collapsed-project]", + ] + ); + + // Move selection to the collapsed header + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = Some(7); + }); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx).last().cloned(), + Some("> [collapsed-project] <== selected".to_string()), + ); + + // Clear selection + sidebar.update_in(cx, |s, _window, _cx| { + s.selection = None; + }); + + // No entry should have the selected marker + let entries = visible_entries_as_strings(&sidebar, cx); + for entry in &entries { + assert!( + !entry.contains("<== selected"), + "unexpected selection marker in: {}", + entry + ); + } + } + + #[gpui::test] + async fn 
test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(3, &path_list, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Entries: [header, thread3, thread2, thread1] + // Focusing the sidebar triggers focus_in, which selects the first entry + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Move down through all entries + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // At the end, selection stays on the last entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // Move back up + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(2)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // At the top, selection stays on the first entry + cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(3, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + + // SelectLast jumps to the end + cx.dispatch_action(SelectLast); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(3)); + + // SelectFirst jumps to the beginning + cx.dispatch_action(SelectFirst); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_focus_in_selects_first(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Initially no selection + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); + + // Open the sidebar so it's rendered, then focus it to trigger focus_in + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Blur the sidebar, then refocus — existing selection should be preserved + cx.update(|window, _cx| { + window.blur(); + }); + cx.run_until_parked(); + + sidebar.update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = 
PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Focus the sidebar — focus_in selects the header (index 0) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // Press confirm to collapse + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // Confirm again to expand + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project] <== selected", " Thread 1",] + ); + } + + #[gpui::test] + async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(8, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Should show header + 5 threads + "View More (3)" + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 7); + assert!(entries.iter().any(|e| e.contains("View More (3)"))); + + // Focus sidebar (selects index 0), then navigate down to the "View More" entry (index 6) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + for _ in 0..6 { + cx.dispatch_action(SelectNext); + } + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(6)); + + // Confirm on "View More" to expand + cx.dispatch_action(Confirm); + 
cx.run_until_parked(); + + // All 8 threads should now be visible, no "View More" + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 9); // header + 8 threads + assert!(!entries.iter().any(|e| e.contains("View More"))); + } + + #[gpui::test] + async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1"] + ); + + // Focus sidebar — focus_in selects the header (index 0). Press left to collapse. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // Press right to expand + cx.dispatch_action(ExpandSelectedEntry); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project] <== selected", " Thread 1",] + ); + + // Press right again on already-expanded header moves selection down + cx.dispatch_action(ExpandSelectedEntry); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + } + + #[gpui::test] + async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = 
setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selects header at index 0), then navigate down to the thread (child) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread 1 <== selected",] + ); + + // Pressing left on a child collapses the parent group and selects it + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + } + + #[gpui::test] + async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { + let project = init_test_project("/empty-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Even an empty project has the header and a new thread button + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [empty-project]", " [+ New Thread]"] + ); + + // Focus sidebar — focus_in selects the first entry (header at 0) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + + // SelectNext moves to the new thread button + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // At the end, selection stays on the last entry + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); + + // SelectPrevious goes back to the header + 
cx.dispatch_action(SelectPrevious); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + } + + #[gpui::test] + async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + save_n_test_threads(1, &path_list, cx).await; + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Focus sidebar (selects header at 0), navigate down to the thread (index 1) + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + cx.dispatch_action(SelectNext); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); - fn init_test(cx: &mut TestAppContext) { + // Collapse the group, which removes the thread from the list + cx.dispatch_action(CollapseSelectedEntry); + cx.run_until_parked(); + + // Selection should be clamped to the last valid index (0 = header) + let selection = sidebar.read_with(cx, |s, _| s.selection); + let entry_count = sidebar.read_with(cx, |s, _| s.contents.entries.len()); + assert!( + selection.unwrap_or(0) < entry_count, + "selection {} should be within bounds (entries: {})", + selection.unwrap_or(0), + entry_count, + ); + } + + async fn init_test_project_with_agent_panel( + worktree_path: &str, + cx: &mut TestAppContext, + ) -> Entity { + agent_ui::test_support::init_test(cx); cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme::init(theme::LoadThemes::JustBase, cx); - editor::init(cx); cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(worktree_path, serde_json::json!({ "src": {} })) + 
.await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + project::Project::test(fs, [worktree_path.as_ref()], cx).await } - fn set_thread_info_and_refresh( - sidebar: &Entity, + fn add_agent_panel( + workspace: &Entity, + project: &Entity, + cx: &mut gpui::VisualTestContext, + ) -> Entity { + workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = cx.new(|cx| AgentPanel::test_new(workspace, text_thread_store, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }) + } + + fn setup_sidebar_with_agent_panel( multi_workspace: &Entity, - index: usize, - title: &str, - status: AgentThreadStatus, + project: &Entity, cx: &mut gpui::VisualTestContext, - ) { - sidebar.update_in(cx, |s, _window, _cx| { - s.set_test_thread_info(index, SharedString::from(title.to_string()), status); + ) -> (Entity, Entity) { + let sidebar = setup_sidebar(multi_workspace, cx); + let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); + let panel = add_agent_panel(&workspace, project, cx); + (sidebar, panel) + } + + #[gpui::test] + async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, &project, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + + // Open thread A and keep it generating. 
+ let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a.clone(), cx); + send_message(&panel, cx); + + let session_id_a = active_session_id(&panel, cx); + save_thread_to_store(&session_id_a, &path_list, cx).await; + + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("working...".into())), + cx, + ); }); - multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - } - fn has_notifications(sidebar: &Entity, cx: &mut gpui::VisualTestContext) -> bool { - sidebar.read_with(cx, |s, cx| s.has_notifications(cx)) + // Open thread B (idle, default response) — thread A goes to background. + let connection_b = StubAgentConnection::new(); + connection_b.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Done".into()), + )]); + open_thread_with_connection(&panel, connection_b, cx); + send_message(&panel, cx); + + let session_id_b = active_session_id(&panel, cx); + save_thread_to_store(&session_id_b, &path_list, cx).await; + + cx.run_until_parked(); + + let mut entries = visible_entries_as_strings(&sidebar, cx); + entries[1..].sort(); + assert_eq!( + entries, + vec!["v [my-project]", " Hello *", " Hello * (running)",] + ); } #[gpui::test] - async fn test_notification_on_running_to_completed_transition(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + async fn test_background_thread_completion_triggers_notification(cx: &mut TestAppContext) { + let project_a = init_test_project_with_agent_panel("/project-a", cx).await; + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, &project_a, cx); + + let 
path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); + + // Open thread on workspace A and keep it generating. + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel_a, connection_a.clone(), cx); + send_message(&panel_a, cx); + + let session_id_a = active_session_id(&panel_a, cx); + save_thread_to_store(&session_id_a, &path_list_a, cx).await; + + cx.update(|_, cx| { + connection_a.send_update( + session_id_a.clone(), + acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("chunk".into())), + cx, + ); + }); + cx.run_until_parked(); + + // Add a second workspace and activate it (making workspace A the background). + let fs = cx.update(|_, cx| ::global(cx)); + let project_b = project::Project::test(fs, [], cx).await; + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + cx.run_until_parked(); + + // Thread A is still running; no notification yet. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Hello * (running)", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + + // Complete thread A's turn (transition Running → Completed). + connection_a.end_turn(session_id_a.clone(), acp::StopReason::EndTurn); + cx.run_until_parked(); + + // The completed background thread shows a notification indicator. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Hello * (!)", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + } + + fn type_in_search(sidebar: &Entity, query: &str, cx: &mut gpui::VisualTestContext) { + sidebar.update_in(cx, |sidebar, window, cx| { + window.focus(&sidebar.filter_editor.focus_handle(cx), cx); + sidebar.filter_editor.update(cx, |editor, cx| { + editor.set_text(query, window, cx); + }); + }); + cx.run_until_parked(); + } + #[gpui::test] + async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("t-1", "Fix crash in project panel", 3), + ("t-2", "Add inline diff view", 2), + ("t-3", "Refactor settings module", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) - }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in project panel", + " Add inline diff view", + " Refactor settings module", + ] + ); + + // User types "diff" in the search box — only the matching thread remains, + // with its workspace header 
preserved for context. + type_in_search(&sidebar, "diff", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Add inline diff view <== selected",] + ); + + // User changes query to something with no matches — list is empty. + type_in_search(&sidebar, "nonexistent", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + Vec::::new() + ); + } + + #[gpui::test] + async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { + // Scenario: A user remembers a thread title but not the exact casing. + // Search should match case-insensitively so they can still find it. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Fix Crash In Project Panel", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) }); + save_task.await.unwrap(); + cx.run_until_parked(); + + // Lowercase query matches mixed-case title. + type_in_search(&sidebar, "fix crash", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); + + // Uppercase query also matches the same title. 
+ type_in_search(&sidebar, "FIX CRASH", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix Crash In Project Panel <== selected", + ] + ); + } + + #[gpui::test] + async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContext) { + // Scenario: A user searches, finds what they need, then presses Escape + // to dismiss the filter and see the full list again. + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + // Confirm the full list is showing. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread", " Beta thread",] + ); + + // User types a search query to filter down. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Alpha thread <== selected",] + ); + + // User presses Escape — filter clears, full list is restored. 
+ cx.dispatch_action(Cancel); cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Alpha thread <== selected", + " Beta thread", + ] + ); + } + + #[gpui::test] + async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) { + let project_a = init_test_project("/project-a", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_a.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } - // Create a second workspace and switch to it so workspace 0 is background. + // Add a second workspace. 
multi_workspace.update_in(cx, |mw, window, cx| { mw.create_workspace(window, cx); }); cx.run_until_parked(); + + let path_list_b = PathList::new::(&[]); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_b.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar", + " Add tests for editor", + "v [Empty Workspace]", + " Refactor sidebar layout", + " Fix typo in README", + ] + ); + + // "sidebar" matches a thread in each workspace — both headers stay visible. + type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project-a]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Refactor sidebar layout", + ] + ); + + // "typo" only matches in the second workspace — the first header disappears. + type_in_search(&sidebar, "typo", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [Empty Workspace]", " Fix typo in README <== selected",] + ); + + // "project-a" matches the first workspace name — the header appears alone + // without any child threads (none of them match "project-a"). 
+ type_in_search(&sidebar, "project-a", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project-a] <== selected"] + ); + } + + #[gpui::test] + async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { + let project_a = init_test_project("/alpha-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list_a = PathList::new(&[std::path::PathBuf::from("/alpha-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("a1", "Fix bug in sidebar", 2), + ("a2", "Add tests for editor", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_a.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + + // Add a second workspace. multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(1, window, cx); + mw.create_workspace(window, cx); }); cx.run_until_parked(); - assert!( - !has_notifications(&sidebar, cx), - "should have no notifications initially" + let path_list_b = PathList::new::(&[]); + + for (id, title, hour) in [ + ("b1", "Refactor sidebar layout", 3), + ("b2", "Fix typo in README", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list_b.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + // "alpha" matches the workspace name "alpha-project" but no thread titles. + // The workspace header should appear with no child threads. 
+ type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [alpha-project] <== selected"] ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, + // "sidebar" matches thread titles in both workspaces but not workspace names. + // Both headers appear with their matching threads. + type_in_search(&sidebar, "sidebar", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Refactor sidebar layout", + ] ); - assert!( - !has_notifications(&sidebar, cx), - "Running status alone should not create a notification" + // "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r + // doesn't match) — but does not match either workspace name or any thread. + // Actually let's test something simpler: a query that matches both a workspace + // name AND some threads in that workspace. Matching threads should still appear. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [alpha-project]", + " Fix bug in sidebar <== selected", + "v [Empty Workspace]", + " Fix typo in README", + ] ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, + // A query that matches a workspace name AND a thread in that same workspace. + // Both the header (highlighted) and the matching thread should appear. 
+ type_in_search(&sidebar, "alpha", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [alpha-project] <== selected"] ); - assert!( - has_notifications(&sidebar, cx), - "Running → Completed transition should create a notification" + // Now search for something that matches only a workspace name when there + // are also threads with matching titles — the non-matching workspace's + // threads should still appear if their titles match. + type_in_search(&sidebar, "alp", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [alpha-project] <== selected"] ); } #[gpui::test] - async fn test_no_notification_for_active_workspace(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; - + async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) - }); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); - }); + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + // Create 8 threads. The oldest one has a unique name and will be + // behind View More (only 5 shown by default). 
+ for i in 0..8u32 { + let title = if i == 0 { + "Hidden gem thread".to_string() + } else { + format!("Thread {}", i + 1) + }; + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(format!("thread-{}", i))), + make_test_thread( + &title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } cx.run_until_parked(); - // Workspace 0 is the active workspace — thread completes while - // the user is already looking at it. - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, + // Confirm the thread is not visible and View More is shown. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("View More")), + "should have View More button" ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, + assert!( + !entries.iter().any(|e| e.contains("Hidden gem")), + "Hidden gem should be behind View More" ); + // User searches for the hidden thread — it appears, and View More is gone. 
+ type_in_search(&sidebar, "hidden gem", cx); + let filtered = visible_entries_as_strings(&sidebar, cx); + assert_eq!( + filtered, + vec!["v [my-project]", " Hidden gem thread <== selected",] + ); assert!( - !has_notifications(&sidebar, cx), - "should not notify for the workspace the user is already looking at" + !filtered.iter().any(|e| e.contains("View More")), + "View More should not appear when filtering" ); } #[gpui::test] - async fn test_notification_cleared_on_workspace_activation(cx: &mut TestAppContext) { - init_test(cx); - let fs = FakeFs::new(cx.executor()); - cx.update(|cx| ::set_global(fs.clone(), cx)); - let project = project::Project::test(fs, [], cx).await; + async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("thread-1")), + make_test_thread( + "Important thread", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + + // User focuses the sidebar and collapses the group using keyboard: + // select the header, then press Confirm to toggle collapse. + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0)); + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project] <== selected"] + ); + + // User types a search — the thread appears even though its group is collapsed. 
+ type_in_search(&sidebar, "important", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["> [my-project]", " Important thread <== selected",] + ); + } + + #[gpui::test] + async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + for (id, title, hour) in [ + ("t-1", "Fix crash in panel", 3), + ("t-2", "Fix lint warnings", 2), + ("t-3", "Add new feature", 1), + ] { + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(id)), + make_test_thread( + title, + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + } + cx.run_until_parked(); + + open_and_focus_sidebar(&sidebar, &multi_workspace, cx); + + // User types "fix" — two threads match. + type_in_search(&sidebar, "fix", cx); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); + + // Selection starts on the first matching thread. User presses + // SelectNext to move to the second match. + cx.dispatch_action(SelectNext); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel", + " Fix lint warnings <== selected", + ] + ); + + // User can also jump back with SelectPrevious. 
+ cx.dispatch_action(SelectPrevious); + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Fix crash in panel <== selected", + " Fix lint warnings", + ] + ); + } + #[gpui::test] + async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); - let sidebar = multi_workspace.update_in(cx, |_mw, window, cx| { - let mw_handle = cx.entity(); - cx.new(|cx| Sidebar::new(mw_handle, window, cx)) - }); multi_workspace.update_in(cx, |mw, window, cx| { - mw.register_sidebar(sidebar.clone(), window, cx); + mw.create_workspace(window, cx); }); cx.run_until_parked(); - // Create a second workspace so we can switch away and back. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.create_workspace(window, cx); + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("hist-1")), + make_test_thread( + "Historical Thread", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) }); + save_task.await.unwrap(); + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - // Switch to workspace 1 so workspace 0 becomes a background workspace. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [my-project]", + " Historical Thread", + "v [Empty Workspace]", + " [+ New Thread]", + ] + ); + + // Switch to workspace 1 so we can verify the confirm switches back. 
multi_workspace.update_in(cx, |mw, window, cx| { mw.activate_index(1, window, cx); }); cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 1 + ); - // Thread on workspace 0 transitions Running → Completed while - // the user is looking at workspace 1. - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Running, - cx, + // Confirm on the historical (non-live) thread at index 1. + // Before the fix, workspace_index was Option and historical + // threads had None, so activate_thread early-returned without + // switching the workspace. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 0 ); - set_thread_info_and_refresh( - &sidebar, - &multi_workspace, - 0, - "Test Thread", - AgentThreadStatus::Completed, - cx, + } + + #[gpui::test] + async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) { + let project = init_test_project("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("t-1")), + make_test_thread( + "Thread A", + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from("t-2")), + make_test_thread( + "Thread B", + chrono::TimeZone::with_ymd_and_hms(&Utc, 
2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Thread A", " Thread B",] ); - assert!( - has_notifications(&sidebar, cx), - "background workspace completion should create a notification" + // Keyboard confirm preserves selection. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = Some(1); + sidebar.confirm(&Confirm, window, cx); + }); + assert_eq!( + sidebar.read_with(cx, |sidebar, _| sidebar.selection), + Some(1) ); - // Switching back to workspace 0 should clear the notification. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.activate_index(0, window, cx); + // Click handlers clear selection to None so no highlight lingers + // after a click regardless of focus state. The hover style provides + // visual feedback during mouse interaction instead. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.selection = None; + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + sidebar.toggle_collapse(&path_list, window, cx); }); + assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None); + + // When the user tabs back into the sidebar, focus_in restores + // selection to the first entry for keyboard navigation. 
+ sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.focus_in(window, cx); + }); + assert_eq!( + sidebar.read_with(cx, |sidebar, _| sidebar.selection), + Some(0) + ); + } + + #[gpui::test] + async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) { + let project = init_test_project_with_agent_panel("/my-project", cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, &project, cx); + + let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + + let connection = StubAgentConnection::new(); + connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( + acp::ContentChunk::new("Hi there!".into()), + )]); + open_thread_with_connection(&panel, connection, cx); + send_message(&panel, cx); + + let session_id = active_session_id(&panel, cx); + save_thread_to_store(&session_id, &path_list, cx).await; cx.run_until_parked(); - assert!( - !has_notifications(&sidebar, cx), - "notification should be cleared when workspace becomes active" + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Hello *"] + ); + + // Simulate the agent generating a title. The notification chain is: + // AcpThread::set_title emits TitleUpdated → + // ConnectionView::handle_thread_event calls cx.notify() → + // AgentPanel observer fires and emits AgentPanelEvent → + // Sidebar subscription calls update_entries / rebuild_contents. + // + // Before the fix, handle_thread_event did NOT call cx.notify() for + // TitleUpdated, so the AgentPanel observer never fired and the + // sidebar kept showing the old title. 
+ let thread = panel.read_with(cx, |panel, cx| panel.active_agent_thread(cx).unwrap()); + thread.update(cx, |thread, cx| { + thread + .set_title("Friendly Greeting with AI".into(), cx) + .detach(); + }); + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [my-project]", " Friendly Greeting with AI *"] ); } } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fa83dd937489f0c52e6c02b83b52112b5ff52ec1..068bc4bce56816962a3b75d6f6497b033a9209a5 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -661,6 +661,51 @@ impl SumTree { } } + pub fn update_first( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) { + self.update_first_recursive(f, cx); + } + + fn update_first_recursive( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) -> Option { + match Arc::make_mut(&mut self.0) { + Node::Internal { + summary, + child_summaries, + child_trees, + .. + } => { + let first_summary = child_summaries.first_mut().unwrap(); + let first_child = child_trees.first_mut().unwrap(); + *first_summary = first_child.update_first_recursive(f, cx).unwrap(); + *summary = sum(child_summaries.iter(), cx); + Some(summary.clone()) + } + Node::Leaf { + summary, + items, + item_summaries, + } => { + if let Some((item, item_summary)) = + items.first_mut().zip(item_summaries.first_mut()) + { + (f)(item); + *item_summary = item.summary(cx); + *summary = sum(item_summaries.iter(), cx); + Some(summary.clone()) + } else { + None + } + } + } + } + pub fn extent<'a, D: Dimension<'a, T::Summary>>( &'a self, cx: ::Context<'_>, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index e58f7a65dd5d13ca67d4433bd25118ffb55d1169..004ec918514e0ad18b3c1e55178a6527866d1bb1 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -53,6 +53,10 @@ impl TreeMap { self.0.is_empty() } + pub fn contains_key(&self, key: &K) -> bool { + 
self.get(key).is_some() + } + pub fn get(&self, key: &K) -> Option<&V> { let (.., item) = self .0 diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml deleted file mode 100644 index c2d0c48a9e7733402eae32886c0863326882c134..0000000000000000000000000000000000000000 --- a/crates/supermaven/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "supermaven" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/supermaven.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -client.workspace = true -collections.workspace = true -edit_prediction_types.workspace = true -futures.workspace = true -gpui.workspace = true -language.workspace = true -log.workspace = true -postage.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -smol.workspace = true -supermaven_api.workspace = true -text.workspace = true -ui.workspace = true -unicode-segmentation.workspace = true -util.workspace = true - -[dev-dependencies] -editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true -gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } -project = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } -theme = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/supermaven/src/messages.rs b/crates/supermaven/src/messages.rs deleted file mode 100644 index 9210343587bbb2cbf172a62a2eff73bbbb7cfb72..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/messages.rs +++ /dev/null @@ -1,146 +0,0 @@ -use serde::{Deserialize, Serialize}; - -// Outbound messages -#[derive(Debug, Serialize)] -#[serde(tag = "kind", 
rename_all = "snake_case")] -pub enum OutboundMessage { - StateUpdate(StateUpdateMessage), - #[allow(dead_code)] - UseFreeVersion, - Logout, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct StateUpdateMessage { - pub new_id: String, - pub updates: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum StateUpdate { - FileUpdate(FileUpdateMessage), - CursorUpdate(CursorPositionUpdateMessage), -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct FileUpdateMessage { - pub path: String, - pub content: String, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct CursorPositionUpdateMessage { - pub path: String, - pub offset: usize, -} - -// Inbound messages coming in on stdout - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum ResponseItem { - // A completion - Text { text: String }, - // Vestigial message type from old versions -- safe to ignore - Del { text: String }, - // Be able to delete whitespace prior to the cursor, likely for the rest of the completion - Dedent { text: String }, - // When the completion is over - End, - // Got the closing parentheses and shouldn't show any more after - Barrier, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenResponse { - pub state_id: String, - pub items: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenMetadataMessage { - pub dust_strings: Option>, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenTaskUpdateMessage { - pub task: String, - pub status: TaskStatus, - pub percent_complete: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum TaskStatus { - InProgress, - Complete, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct 
SupermavenActiveRepoMessage { - pub repo_simple_name: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenPopupAction { - OpenUrl { label: String, url: String }, - NoOp { label: String }, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct SupermavenPopupMessage { - pub message: String, - pub actions: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "camelCase")] -pub struct ActivationRequest { - pub activate_url: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenSetMessage { - pub key: String, - pub value: serde_json::Value, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub enum ServiceTier { - FreeNoLicense, - #[serde(other)] - Unknown, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenMessage { - Response(SupermavenResponse), - Metadata(SupermavenMetadataMessage), - Apology { - message: Option, - }, - ActivationRequest(ActivationRequest), - ActivationSuccess, - Passthrough { - passthrough: Box, - }, - Popup(SupermavenPopupMessage), - TaskStatus(SupermavenTaskUpdateMessage), - ActiveRepo(SupermavenActiveRepoMessage), - ServiceTier { - service_tier: ServiceTier, - }, - - Set(SupermavenSetMessage), - #[serde(other)] - Unknown, -} diff --git a/crates/supermaven/src/supermaven.rs b/crates/supermaven/src/supermaven.rs deleted file mode 100644 index 96f9b9c58bf934ae3991375ee8ef15cbf990dcc4..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven.rs +++ /dev/null @@ -1,485 +0,0 @@ -mod messages; -mod supermaven_edit_prediction_delegate; - -pub use supermaven_edit_prediction_delegate::*; - -use anyhow::{Context as _, Result}; -#[allow(unused_imports)] -use client::{Client, proto}; -use collections::BTreeMap; - -use futures::{AsyncBufReadExt, StreamExt, 
channel::mpsc, io::BufReader}; -use gpui::{App, AsyncApp, Context, Entity, EntityId, Global, Task, WeakEntity, actions}; -use language::{ - Anchor, Buffer, BufferSnapshot, ToOffset, language_settings::all_language_settings, -}; -use messages::*; -use postage::watch; -use serde::{Deserialize, Serialize}; -use settings::SettingsStore; -use smol::io::AsyncWriteExt; -use std::{path::PathBuf, sync::Arc}; -use ui::prelude::*; -use util::ResultExt; -use util::command::Child; -use util::command::Stdio; - -actions!( - supermaven, - [ - /// Signs out of Supermaven. - SignOut - ] -); - -pub fn init(client: Arc, cx: &mut App) { - let supermaven = cx.new(|_| Supermaven::Starting); - Supermaven::set_global(supermaven.clone(), cx); - - let mut provider = all_language_settings(None, cx).edit_predictions.provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } - - cx.observe_global::(move |cx| { - let new_provider = all_language_settings(None, cx).edit_predictions.provider; - if new_provider != provider { - provider = new_provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } else { - supermaven.update(cx, |supermaven, _cx| supermaven.stop()); - } - } - }) - .detach(); - - cx.on_action(|_: &SignOut, cx| { - if let Some(supermaven) = Supermaven::global(cx) { - supermaven.update(cx, |supermaven, _cx| supermaven.sign_out()); - } - }); -} - -pub enum Supermaven { - Starting, - FailedDownload { error: anyhow::Error }, - Spawned(SupermavenAgent), - Error { error: anyhow::Error }, -} - -#[derive(Clone)] -pub enum AccountStatus { - Unknown, - NeedsActivation { activate_url: String }, - Ready, -} - -#[derive(Clone)] -struct SupermavenGlobal(Entity); - -impl Global for SupermavenGlobal {} - -impl Supermaven { - pub fn global(cx: &App) -> Option> { - 
cx.try_global::() - .map(|model| model.0.clone()) - } - - pub fn set_global(supermaven: Entity, cx: &mut App) { - cx.set_global(SupermavenGlobal(supermaven)); - } - - pub fn start(&mut self, client: Arc, cx: &mut Context) { - if let Self::Starting = self { - cx.spawn(async move |this, cx| { - let binary_path = - supermaven_api::get_supermaven_agent_path(client.http_client()).await?; - - this.update(cx, |this, cx| { - if let Self::Starting = this { - *this = - Self::Spawned(SupermavenAgent::new(binary_path, client.clone(), cx)?); - } - anyhow::Ok(()) - }) - }) - .detach_and_log_err(cx) - } - } - - pub fn stop(&mut self) { - *self = Self::Starting; - } - - pub fn is_enabled(&self) -> bool { - matches!(self, Self::Spawned { .. }) - } - - pub fn complete( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option { - if let Self::Spawned(agent) = self { - let buffer_id = buffer.entity_id(); - let buffer = buffer.read(cx); - let path = buffer - .file() - .and_then(|file| Some(file.as_local()?.abs_path(cx))) - .unwrap_or_else(|| PathBuf::from("untitled")) - .to_string_lossy() - .to_string(); - let content = buffer.text(); - let offset = cursor_position.to_offset(buffer); - let state_id = agent.next_state_id; - agent.next_state_id.0 += 1; - - let (updates_tx, mut updates_rx) = watch::channel(); - postage::stream::Stream::try_recv(&mut updates_rx).unwrap(); - - agent.states.insert( - state_id, - SupermavenCompletionState { - buffer_id, - prefix_anchor: cursor_position, - prefix_offset: offset, - text: String::new(), - dedent: String::new(), - updates_tx, - }, - ); - // ensure the states map is max 1000 elements - if agent.states.len() > 1000 { - // state id is monotonic so it's sufficient to remove the first element - agent - .states - .remove(&agent.states.keys().next().unwrap().clone()); - } - - let _ = agent - .outgoing_tx - .unbounded_send(OutboundMessage::StateUpdate(StateUpdateMessage { - new_id: state_id.0.to_string(), - updates: vec![ - 
StateUpdate::FileUpdate(FileUpdateMessage { - path: path.clone(), - content, - }), - StateUpdate::CursorUpdate(CursorPositionUpdateMessage { path, offset }), - ], - })); - - Some(SupermavenCompletion { - id: state_id, - updates: updates_rx, - }) - } else { - None - } - } - - pub fn completion( - &self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option<&str> { - if let Self::Spawned(agent) = self { - find_relevant_completion( - &agent.states, - buffer.entity_id(), - &buffer.read(cx).snapshot(), - cursor_position, - ) - } else { - None - } - } - - pub fn sign_out(&mut self) { - if let Self::Spawned(agent) = self { - agent - .outgoing_tx - .unbounded_send(OutboundMessage::Logout) - .ok(); - // The account status will get set to RequiresActivation or Ready when the next - // message from the agent comes in. Until that happens, set the status to Unknown - // to disable the button. - agent.account_status = AccountStatus::Unknown; - } - } -} - -fn find_relevant_completion<'a>( - states: &'a BTreeMap, - buffer_id: EntityId, - buffer: &BufferSnapshot, - cursor_position: Anchor, -) -> Option<&'a str> { - let mut best_completion: Option<&str> = None; - 'completions: for state in states.values() { - if state.buffer_id != buffer_id { - continue; - } - let Some(state_completion) = state.text.strip_prefix(&state.dedent) else { - continue; - }; - - let current_cursor_offset = cursor_position.to_offset(buffer); - if current_cursor_offset < state.prefix_offset { - continue; - } - - let original_cursor_offset = buffer.clip_offset(state.prefix_offset, text::Bias::Left); - let text_inserted_since_completion_request: String = buffer - .text_for_range(original_cursor_offset..current_cursor_offset) - .collect(); - let trimmed_completion = - match state_completion.strip_prefix(&text_inserted_since_completion_request) { - Some(suffix) => suffix, - None => continue 'completions, - }; - - if best_completion.is_some_and(|best| best.len() > trimmed_completion.len()) { - 
continue; - } - - best_completion = Some(trimmed_completion); - } - best_completion -} - -pub struct SupermavenAgent { - _process: Child, - next_state_id: SupermavenCompletionStateId, - states: BTreeMap, - outgoing_tx: mpsc::UnboundedSender, - _handle_outgoing_messages: Task>, - _handle_incoming_messages: Task>, - pub account_status: AccountStatus, - service_tier: Option, - #[allow(dead_code)] - client: Arc, -} - -impl SupermavenAgent { - fn new( - binary_path: PathBuf, - client: Arc, - cx: &mut Context, - ) -> Result { - let mut process = util::command::new_command(&binary_path) - .arg("stdio") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .kill_on_drop(true) - .spawn() - .context("failed to start the binary")?; - - let stdin = process - .stdin - .take() - .context("failed to get stdin for process")?; - let stdout = process - .stdout - .take() - .context("failed to get stdout for process")?; - - let (outgoing_tx, outgoing_rx) = mpsc::unbounded(); - - Ok(Self { - _process: process, - next_state_id: SupermavenCompletionStateId::default(), - states: BTreeMap::default(), - outgoing_tx, - _handle_outgoing_messages: cx.spawn(async move |_, _cx| { - Self::handle_outgoing_messages(outgoing_rx, stdin).await - }), - _handle_incoming_messages: cx.spawn(async move |this, cx| { - Self::handle_incoming_messages(this, stdout, cx).await - }), - account_status: AccountStatus::Unknown, - service_tier: None, - client, - }) - } - - async fn handle_outgoing_messages( - mut outgoing: mpsc::UnboundedReceiver, - mut stdin: W, - ) -> Result<()> { - while let Some(message) = outgoing.next().await { - let bytes = serde_json::to_vec(&message)?; - stdin.write_all(&bytes).await?; - stdin.write_all(&[b'\n']).await?; - } - Ok(()) - } - - async fn handle_incoming_messages( - this: WeakEntity, - stdout: R, - cx: &mut AsyncApp, - ) -> Result<()> { - const MESSAGE_PREFIX: &str = "SM-MESSAGE "; - - let stdout = BufReader::new(stdout); - let mut lines = stdout.lines(); 
- while let Some(line) = lines.next().await { - let Some(line) = line.context("failed to read line from stdout").log_err() else { - continue; - }; - let Some(line) = line.strip_prefix(MESSAGE_PREFIX) else { - continue; - }; - let Some(message) = serde_json::from_str::(line) - .with_context(|| format!("failed to deserialize line from stdout: {:?}", line)) - .log_err() - else { - continue; - }; - - this.update(cx, |this, _cx| { - if let Supermaven::Spawned(this) = this { - this.handle_message(message); - } - Task::ready(anyhow::Ok(())) - })? - .await?; - } - - Ok(()) - } - - fn handle_message(&mut self, message: SupermavenMessage) { - match message { - SupermavenMessage::ActivationRequest(request) => { - self.account_status = match request.activate_url { - Some(activate_url) => AccountStatus::NeedsActivation { activate_url }, - None => AccountStatus::Ready, - }; - } - SupermavenMessage::ActivationSuccess => { - self.account_status = AccountStatus::Ready; - } - SupermavenMessage::ServiceTier { service_tier } => { - self.account_status = AccountStatus::Ready; - self.service_tier = Some(service_tier); - } - SupermavenMessage::Response(response) => { - let state_id = SupermavenCompletionStateId(response.state_id.parse().unwrap()); - if let Some(state) = self.states.get_mut(&state_id) { - for item in &response.items { - match item { - ResponseItem::Text { text } => state.text.push_str(text), - ResponseItem::Dedent { text } => state.dedent.push_str(text), - _ => {} - } - } - *state.updates_tx.borrow_mut() = (); - } - } - SupermavenMessage::Passthrough { passthrough } => self.handle_message(*passthrough), - _ => { - log::warn!("unhandled message: {:?}", message); - } - } - } -} - -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -pub struct SupermavenCompletionStateId(usize); - -#[allow(dead_code)] -pub struct SupermavenCompletionState { - buffer_id: EntityId, - prefix_anchor: Anchor, - // prefix_offset is tracked independently 
because the anchor biases left which - // doesn't allow us to determine if the prior text has been deleted. - prefix_offset: usize, - text: String, - dedent: String, - updates_tx: watch::Sender<()>, -} - -pub struct SupermavenCompletion { - pub id: SupermavenCompletionStateId, - pub updates: watch::Receiver<()>, -} - -#[cfg(test)] -mod tests { - use super::*; - use collections::BTreeMap; - use gpui::TestAppContext; - use language::Buffer; - - #[gpui::test] - async fn test_find_relevant_completion_no_first_letter_skip(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(1); - - let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - cursor_position, - ); - - assert_eq!(result, Some("ello")); - } - - #[gpui::test] - async fn test_find_relevant_completion_with_multiple_chars(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(3); - - 
let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - cursor_position, - ); - - assert_eq!(result, Some("lo")); - } -} diff --git a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs b/crates/supermaven/src/supermaven_edit_prediction_delegate.rs deleted file mode 100644 index f9eb4a210cff705d609cad3de13924a86253655a..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs +++ /dev/null @@ -1,303 +0,0 @@ -use crate::{Supermaven, SupermavenCompletionStateId}; -use anyhow::Result; -use edit_prediction_types::{ - EditPrediction, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet, -}; -use futures::StreamExt as _; -use gpui::{App, Context, Entity, EntityId, Task}; -use language::{Anchor, Buffer, BufferSnapshot}; -use std::{ - ops::{AddAssign, Range}, - path::Path, - sync::Arc, - time::Duration, -}; -use text::{ToOffset, ToPoint}; -use ui::prelude::*; -use unicode_segmentation::UnicodeSegmentation; - -pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); - -pub struct SupermavenEditPredictionDelegate { - supermaven: Entity, - buffer_id: Option, - completion_id: Option, - completion_text: Option, - file_extension: Option, - pending_refresh: Option>>, - completion_position: Option, -} - -impl SupermavenEditPredictionDelegate { - pub fn new(supermaven: Entity) -> Self { - Self { - supermaven, - buffer_id: None, - completion_id: None, - completion_text: None, - file_extension: None, - pending_refresh: None, - completion_position: None, - } - } -} - -// Computes the edit prediction from the difference between the completion text. -// This is defined by greedily matching the buffer text against the completion text. -// Inlays are inserted for parts of the completion text that are not present in the buffer text. 
-// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]". -// The parts in brackets are the inlays. -fn completion_from_diff( - snapshot: BufferSnapshot, - completion_text: &str, - position: Anchor, - delete_range: Range, -) -> EditPrediction { - let buffer_text = snapshot.text_for_range(delete_range).collect::(); - - let mut edits: Vec<(Range, Arc)> = Vec::new(); - - let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); - let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); - - let mut offset = position.to_offset(&snapshot); - - let mut i = 0; - let mut j = 0; - while i < completion_graphemes.len() && j < buffer_graphemes.len() { - // find the next instance of the buffer text in the completion text. - let k = completion_graphemes[i..] - .iter() - .position(|c| *c == buffer_graphemes[j]); - match k { - Some(k) => { - if k != 0 { - let offset = snapshot.anchor_after(offset); - // the range from the current position to item is an inlay. - let edit = ( - offset..offset, - completion_graphemes[i..i + k].join("").into(), - ); - edits.push(edit); - } - i += k + 1; - j += 1; - offset.add_assign(buffer_graphemes[j - 1].len()); - } - None => { - // there are no more matching completions, so drop the remaining - // completion text as an inlay. - break; - } - } - } - - if j == buffer_graphemes.len() && i < completion_graphemes.len() { - let offset = snapshot.anchor_after(offset); - // there is leftover completion text, so drop it as an inlay. 
- let edit_range = offset..offset; - let edit_text = completion_graphemes[i..].join(""); - edits.push((edit_range, edit_text.into())); - } - - EditPrediction::Local { - id: None, - edits, - cursor_position: None, - edit_preview: None, - } -} - -impl EditPredictionDelegate for SupermavenEditPredictionDelegate { - fn name() -> &'static str { - "supermaven" - } - - fn display_name() -> &'static str { - "Supermaven" - } - - fn show_predictions_in_menu() -> bool { - true - } - - fn show_tab_accept_marker() -> bool { - true - } - - fn supports_jump_to_edit() -> bool { - false - } - - fn icons(&self, _cx: &App) -> EditPredictionIconSet { - EditPredictionIconSet::new(IconName::Supermaven) - .with_disabled(IconName::SupermavenDisabled) - .with_error(IconName::SupermavenError) - } - - fn is_enabled(&self, _buffer: &Entity, _cursor_position: Anchor, cx: &App) -> bool { - self.supermaven.read(cx).is_enabled() - } - - fn is_refreshing(&self, _cx: &App) -> bool { - self.pending_refresh.is_some() && self.completion_id.is_none() - } - - fn refresh( - &mut self, - buffer_handle: Entity, - cursor_position: Anchor, - debounce: bool, - cx: &mut Context, - ) { - // Only make new completion requests when debounce is true (i.e., when text is typed) - // When debounce is false (i.e., cursor movement), we should not make new requests - if !debounce { - return; - } - - reset_completion_cache(self, cx); - - let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| { - supermaven.complete(&buffer_handle, cursor_position, cx) - }) else { - return; - }; - - self.pending_refresh = Some(cx.spawn(async move |this, cx| { - if debounce { - cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; - } - - while let Some(()) = completion.updates.next().await { - this.update(cx, |this, cx| { - // Get the completion text and cache it - if let Some(text) = - this.supermaven - .read(cx) - .completion(&buffer_handle, cursor_position, cx) - { - this.completion_text = Some(text.to_string()); - - 
this.completion_position = Some(cursor_position); - } - - this.completion_id = Some(completion.id); - this.buffer_id = Some(buffer_handle.entity_id()); - this.file_extension = buffer_handle.read(cx).file().and_then(|file| { - Some( - Path::new(file.file_name(cx)) - .extension()? - .to_str()? - .to_string(), - ) - }); - cx.notify(); - })?; - } - Ok(()) - })); - } - - fn accept(&mut self, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn discard(&mut self, _reason: EditPredictionDiscardReason, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn suggest( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &mut Context, - ) -> Option { - if self.buffer_id != Some(buffer.entity_id()) { - return None; - } - - if self.completion_id.is_none() { - return None; - } - - let completion_text = if let Some(cached_text) = &self.completion_text { - cached_text.as_str() - } else { - let text = self - .supermaven - .read(cx) - .completion(buffer, cursor_position, cx)?; - self.completion_text = Some(text.to_string()); - text - }; - - // Check if the cursor is still at the same position as the completion request - // If we don't have a completion position stored, don't show the completion - if let Some(completion_position) = self.completion_position { - if cursor_position != completion_position { - return None; - } - } else { - return None; - } - - let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text); - - let completion_text = completion_text.trim_end(); - - if !completion_text.trim().is_empty() { - let snapshot = buffer.read(cx).snapshot(); - - // Calculate the range from cursor to end of line correctly - let cursor_point = cursor_position.to_point(&snapshot); - let end_of_line = snapshot.anchor_after(language::Point::new( - cursor_point.row, - snapshot.line_len(cursor_point.row), - )); - let delete_range = cursor_position..end_of_line; - - Some(completion_from_diff( - snapshot, - completion_text, - 
cursor_position, - delete_range, - )) - } else { - None - } - } -} - -fn reset_completion_cache( - provider: &mut SupermavenEditPredictionDelegate, - _cx: &mut Context, -) { - provider.pending_refresh = None; - provider.completion_id = None; - provider.completion_text = None; - provider.completion_position = None; - provider.buffer_id = None; -} - -fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str { - if has_leading_newline(text) { - text - } else if let Some(i) = text.find('\n') { - &text[..i] - } else { - text - } -} - -fn has_leading_newline(text: &str) -> bool { - for c in text.chars() { - if c == '\n' { - return true; - } - if !c.is_whitespace() { - return false; - } - } - false -} diff --git a/crates/supermaven_api/Cargo.toml b/crates/supermaven_api/Cargo.toml deleted file mode 100644 index 28868a9a7433f995e99b861cf7f6e9aeeb28942f..0000000000000000000000000000000000000000 --- a/crates/supermaven_api/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "supermaven_api" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/supermaven_api.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -futures.workspace = true -http_client.workspace = true -paths.workspace = true -serde.workspace = true -serde_json.workspace = true -smol.workspace = true -util.workspace = true diff --git a/crates/supermaven_api/LICENSE-GPL b/crates/supermaven_api/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/supermaven_api/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/supermaven_api/src/supermaven_api.rs b/crates/supermaven_api/src/supermaven_api.rs deleted file mode 100644 index 97e70e58a18fc277d8cb17e2fb8fd3c71b884420..0000000000000000000000000000000000000000 --- 
a/crates/supermaven_api/src/supermaven_api.rs +++ /dev/null @@ -1,125 +0,0 @@ -use anyhow::{Context as _, Result}; -use futures::AsyncReadExt; -use futures::io::BufReader; -use http_client::{AsyncBody, HttpClient, Request as HttpRequest}; -use paths::supermaven_dir; -use serde::Deserialize; -use smol::fs::{self, File}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use util::fs::{make_file_executable, remove_matching}; - -#[derive(Deserialize)] -pub struct SupermavenApiError { - pub message: String, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenDownloadResponse { - pub download_url: String, - pub version: u64, - pub sha256_hash: String, -} - -pub async fn latest_release( - client: Arc, - platform: &str, - arch: &str, -) -> Result { - let uri = format!( - "https://supermaven.com/api/download-path?platform={}&arch={}", - platform, arch - ); - - // Download is not authenticated - let request = HttpRequest::get(&uri); - - let mut response = client - .send(request.body(AsyncBody::default())?) 
- .await - .with_context(|| "Unable to acquire Supermaven Agent".to_string())?; - - let mut body = Vec::new(); - response.body_mut().read_to_end(&mut body).await?; - - if response.status().is_client_error() || response.status().is_server_error() { - let body_str = std::str::from_utf8(&body)?; - let error: SupermavenApiError = serde_json::from_str(body_str)?; - anyhow::bail!("Supermaven API error: {}", error.message); - } - - serde_json::from_slice::(&body) - .with_context(|| "Unable to parse Supermaven Agent response".to_string()) -} - -pub fn version_path(version: u64) -> PathBuf { - supermaven_dir().join(format!( - "sm-agent-{}{}", - version, - std::env::consts::EXE_SUFFIX - )) -} - -pub async fn has_version(version_path: &Path) -> bool { - fs::metadata(version_path).await.is_ok_and(|m| m.is_file()) -} - -pub async fn get_supermaven_agent_path(client: Arc) -> Result { - fs::create_dir_all(supermaven_dir()) - .await - .with_context(|| { - format!( - "Could not create Supermaven Agent Directory at {:?}", - supermaven_dir() - ) - })?; - - let platform = match std::env::consts::OS { - "macos" => "darwin", - "windows" => "windows", - "linux" => "linux", - unsupported => anyhow::bail!("unsupported platform {unsupported}"), - }; - - let arch = match std::env::consts::ARCH { - "x86_64" => "amd64", - "aarch64" => "arm64", - unsupported => anyhow::bail!("unsupported architecture {unsupported}"), - }; - - let download_info = latest_release(client.clone(), platform, arch).await?; - - let binary_path = version_path(download_info.version); - - if has_version(&binary_path).await { - // Due to an issue with the Supermaven binary not being made executable on - // earlier Zed versions and Supermaven releases not occurring that frequently, - // we ensure here that the found binary is actually executable. 
- make_file_executable(&binary_path).await?; - - return Ok(binary_path); - } - - let request = HttpRequest::get(&download_info.download_url); - - let mut response = client - .send(request.body(AsyncBody::default())?) - .await - .with_context(|| "Unable to download Supermaven Agent".to_string())?; - - let mut file = File::create(&binary_path) - .await - .with_context(|| format!("Unable to create file at {:?}", binary_path))?; - - futures::io::copy(BufReader::new(response.body_mut()), &mut file) - .await - .with_context(|| format!("Unable to write binary to file at {:?}", binary_path))?; - - make_file_executable(&binary_path).await?; - - remove_matching(supermaven_dir(), |file| file != binary_path).await; - - Ok(binary_path) -} diff --git a/crates/terminal/src/terminal_hyperlinks.rs b/crates/terminal/src/terminal_hyperlinks.rs index d239f680f9e2ecbd3d320e731d3cc74303a552ed..0ca6cb2edd916019a4a7822830faa1fdfaa238f3 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -905,6 +905,18 @@ mod tests { ); } + #[test] + // + fn issue_50531() { + // Paths preceded by "N:" prefix (e.g. 
grep output line numbers) + // should still be clickable + test_path!("0: ‹«foo/👉bar.txt»›"); + test_path!("0: ‹«👉foo/bar.txt»›"); + test_path!("42: ‹«👉foo/bar.txt»›"); + test_path!("1: ‹«/👉test/cool.rs»›"); + test_path!("1: ‹«/👉test/cool.rs»:«4»:«2»›"); + } + #[test] // fn issue_46795() { diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 80926f17f0ce5a4cd464bfe3bf71e5576495d407..88bde3c771f72a0771a405cfbf123ac4e2286ad9 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -397,10 +397,7 @@ impl TerminalPanel { }; panel .update_in(cx, |panel, window, cx| { - panel - .center - .split(&pane, &new_pane, direction, cx) - .log_err(); + panel.center.split(&pane, &new_pane, direction, cx); window.focus(&new_pane.focus_handle(cx), cx); }) .ok(); @@ -424,7 +421,7 @@ impl TerminalPanel { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx); }); - self.center.split(&pane, &new_pane, direction, cx).log_err(); + self.center.split(&pane, &new_pane, direction, cx); window.focus(&new_pane.focus_handle(cx), cx); } }; @@ -1303,17 +1300,13 @@ pub fn new_terminal_pane( &new_pane, split_direction, cx, - )?; - anyhow::Ok(new_pane) + ); + new_pane }) else { return; }; - let Some(new_pane) = new_pane.log_err() else { - return; - }; - move_item( &source, &new_pane, @@ -1569,15 +1562,12 @@ impl Render for TerminalPanel { _ = terminal_panel.update_in( cx, |terminal_panel, window, cx| { - terminal_panel - .center - .split( - &terminal_panel.active_pane, - &new_pane, - SplitDirection::Right, - cx, - ) - .log_err(); + terminal_panel.center.split( + &terminal_panel.active_pane, + &new_pane, + SplitDirection::Right, + cx, + ); let new_pane = new_pane.read(cx); window.focus(&new_pane.focus_handle(cx), cx); }, diff --git a/crates/terminal_view/src/terminal_scrollbar.rs b/crates/terminal_view/src/terminal_scrollbar.rs index 
82ca0b4097dad1be899879b0241aed50d8e60bfa..16dc580e877310b79501ca469b0351935dbb46f7 100644 --- a/crates/terminal_view/src/terminal_scrollbar.rs +++ b/crates/terminal_view/src/terminal_scrollbar.rs @@ -3,7 +3,7 @@ use std::{ rc::Rc, }; -use gpui::{Bounds, Point, Size, size}; +use gpui::{Bounds, Point, point, size}; use terminal::Terminal; use ui::{Pixels, ScrollableHandle, px}; @@ -46,9 +46,9 @@ impl TerminalScrollHandle { } impl ScrollableHandle for TerminalScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { let state = self.state.borrow(); - size( + point( Pixels::ZERO, state.total_lines.saturating_sub(state.viewport_lines) as f32 * state.line_height, ) diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 22a5c3090a1ef9e1c3581893ae8cbe16f79d776b..63e0570e91ef08dfce02fbbca25e97ee7519dc0a 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -20,7 +20,7 @@ pub struct Anchor { /// The byte offset into the text inserted in the operation /// at `timestamp`. - pub offset: usize, + pub offset: u32, /// Whether this anchor stays attached to the character *before* or *after* /// the offset. 
pub bias: Bias, @@ -49,7 +49,7 @@ impl Anchor { pub const MIN: Self = Self { timestamp_replica_id: clock::Lamport::MIN.replica_id, timestamp_value: clock::Lamport::MIN.value, - offset: usize::MIN, + offset: u32::MIN, bias: Bias::Left, buffer_id: None, }; @@ -57,14 +57,14 @@ impl Anchor { pub const MAX: Self = Self { timestamp_replica_id: clock::Lamport::MAX.replica_id, timestamp_value: clock::Lamport::MAX.value, - offset: usize::MAX, + offset: u32::MAX, bias: Bias::Right, buffer_id: None, }; pub fn new( timestamp: clock::Lamport, - offset: usize, + offset: u32, bias: Bias, buffer_id: Option, ) -> Self { @@ -81,7 +81,7 @@ impl Anchor { Self { timestamp_replica_id: clock::Lamport::MIN.replica_id, timestamp_value: clock::Lamport::MIN.value, - offset: usize::MIN, + offset: u32::MIN, bias: Bias::Left, buffer_id: Some(buffer_id), } @@ -91,7 +91,7 @@ impl Anchor { Self { timestamp_replica_id: clock::Lamport::MAX.replica_id, timestamp_value: clock::Lamport::MAX.value, - offset: usize::MAX, + offset: u32::MAX, bias: Bias::Right, buffer_id: Some(buffer_id), } @@ -190,13 +190,13 @@ impl Anchor { pub fn is_min(&self) -> bool { self.timestamp() == clock::Lamport::MIN - && self.offset == usize::MIN + && self.offset == u32::MIN && self.bias == Bias::Left } pub fn is_max(&self) -> bool { self.timestamp() == clock::Lamport::MAX - && self.offset == usize::MAX + && self.offset == u32::MAX && self.bias == Bias::Right } diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index cc94441a3d1ea2654875cd286d91b9dc2334ab53..be72c4dd9564d4c6024ce0206ff0eb99b0cd457b 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -8,18 +8,32 @@ use std::iter; /// /// The initial location for a collection should be `Locator::between(Locator::min(), Locator::max())`, /// leaving room for items to be inserted before and after it. 
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u64; 4]>); +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Locator(SmallVec<[u64; 2]>); + +impl Clone for Locator { + fn clone(&self) -> Self { + // We manually implement clone to avoid the overhead of SmallVec's clone implementation. + // Using `from_slice` is faster than `clone` for SmallVec as we can use our `Copy` implementation of u64. + Self { + 0: SmallVec::from_slice(&self.0), + } + } + + fn clone_from(&mut self, source: &Self) { + self.0.clone_from(&source.0); + } +} impl Locator { pub const fn min() -> Self { - // SAFETY: 1 is <= 4 - Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 4], 1) }) + // SAFETY: 1 is <= 2 + Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 2], 1) }) } pub const fn max() -> Self { - // SAFETY: 1 is <= 4 - Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 4], 1) }) + // SAFETY: 1 is <= 2 + Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 2], 1) }) } pub const fn min_ref() -> &'static Self { @@ -40,6 +54,7 @@ impl Locator { let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { + // This shift is essential! It optimizes for the common case of sequential typing. let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48); location.push(mid); if mid > lhs { @@ -127,4 +142,36 @@ mod tests { ); } } + + // Simulates 100,000 sequential forward appends (the pattern used when + // building a buffer's initial fragments and when + // `push_fragments_for_insertion` chains new text fragments). 
+ #[test] + fn test_sequential_forward_append_stays_at_depth_1() { + let mut prev = Locator::min(); + let max = Locator::max(); + for _ in 0..100_000 { + let loc = Locator::between(&prev, &max); + assert_eq!(loc.len(), 1, "sequential forward append grew past depth 1"); + prev = loc; + } + } + + // Simulates the most common real editing pattern: a fragment is split + // (producing a depth-2 prefix), then 10,000 new fragments are inserted + // sequentially forward within that split region. + #[test] + fn test_typing_at_cursor_stays_at_depth_2() { + let initial = Locator::between(&Locator::min(), &Locator::max()); + let prefix = Locator::between(&Locator::min(), &initial); + assert_eq!(prefix.len(), 2); + + let suffix_id = initial; + let mut prev = prefix; + for _ in 0..10_000 { + let loc = Locator::between(&prev, &suffix_id); + assert_eq!(loc.len(), 2, "forward typing after split grew past depth 2"); + prev = loc; + } + } } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index c9e04e407ffdb8ffde6b139e01d78822e54e1a4b..194ac2a40d5ac96a39177eedd35b991ded30de38 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -810,3 +810,188 @@ fn test_random_concurrent_edits(mut rng: StdRng) { buffer.check_invariants(); } } + +#[test] +fn test_new_normalized_splits_large_base_text() { + // ASCII text that exceeds max_insertion_len + let text = "abcdefghij".repeat(10); // 100 bytes + let rope = Rope::from(text.as_str()); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + + // Verify anchors at various positions, including across chunk boundaries + for offset in [0, 1, 15, 16, 17, 50, 99] { + let anchor = buffer.anchor_before(offset); + assert_eq!( + anchor.to_offset(&buffer), + offset, + "anchor_before({offset}) round-tripped incorrectly" + ); + let anchor = buffer.anchor_after(offset); + assert_eq!( + 
anchor.to_offset(&buffer), + offset, + "anchor_after({offset}) round-tripped incorrectly" + ); + } + + // Verify editing works after a split initialization + let mut buffer = buffer; + buffer.edit([(50..60, "XYZ")]); + let mut expected = text; + expected.replace_range(50..60, "XYZ"); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_new_normalized_splits_large_base_text_with_multibyte_chars() { + // Use multi-byte chars (é is 2 bytes in UTF-8) so that a naive byte-level + // split would land in the middle of a character. + let unit = "ééééééééé"; // 9 chars × 2 bytes = 18 bytes + let text = unit.repeat(6); // 108 bytes + let rope = Rope::from(text.as_str()); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + + // Every anchor should resolve correctly even though chunks had to be + // rounded down to a char boundary. + let snapshot = buffer.snapshot(); + for offset in (0..text.len()).filter(|o| text.is_char_boundary(*o)) { + let anchor = snapshot.anchor_before(offset); + assert_eq!( + anchor.to_offset(snapshot), + offset, + "anchor round-trip failed at byte offset {offset}" + ); + } +} + +#[test] +fn test_new_normalized_small_text_unchanged() { + // Text that fits in a single chunk should produce exactly one fragment, + // matching the original single-fragment behaviour. 
+ let text = "hello world"; + let rope = Rope::from(text); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + assert_eq!(buffer.snapshot().fragments.items(&None).len(), 1); +} + +#[test] +fn test_edit_splits_large_insertion() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij"); + + let large_text: Arc = "X".repeat(100).into(); + let edits = vec![(3..7, large_text.clone())]; + + buffer.edit(edits); + + let expected = format!("abc{}hij", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); + + // Anchors should resolve correctly throughout the buffer. + for offset in [0, 3, 50, 103, expected.len()] { + let anchor = buffer.anchor_before(offset); + assert_eq!( + anchor.to_offset(&buffer), + offset, + "anchor_before({offset}) round-tripped incorrectly" + ); + } +} + +#[test] +fn test_edit_splits_large_insertion_with_multibyte_chars() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij"); + + // 4-byte chars so that naive byte splits would land mid-character. + let large_text: Arc = "😀".repeat(30).into(); // 30 × 4 = 120 bytes + let edits = vec![(5..5, large_text.clone())]; + + buffer.edit(edits); + + let expected = format!("abcde{}fghij", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_splits_large_insertion_among_multiple_edits() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDEFGHIJ"); + + let large_text: Arc = "x".repeat(60).into(); + // Three edits: small, large, small. The large one must be split while + // preserving the correct positions of the surrounding edits. 
+ let edits = vec![ + (1..2, Arc::from("y")), // replace "B" with "y" + (4..6, large_text.clone()), // replace "EF" with 60 x's + (9..9, Arc::from("z")), // insert "z" before "J" + ]; + + buffer.edit(edits); + + // Original: A B C D E F G H I J + // After (1..2, "y"): A y C D E F G H I J + // After (4..6, large): A y C D <60 x's> G H I J + // After (9..9, "z"): A y C D <60 x's> G H I z J + let expected = format!("AyCD{}GHIzJ", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_splits_multiple_large_insertions() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDE"); + + let text1: Arc = "a".repeat(40).into(); + let text2: Arc = "b".repeat(40).into(); + let edits = vec![ + (1..2, text1.clone()), // replace "B" with 40 a's + (3..4, text2.clone()), // replace "D" with 40 b's + ]; + + buffer.edit(edits); + + let expected = format!("A{}C{}E", text1, text2); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_undo_after_split() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello world"); + buffer.set_group_interval(Duration::from_secs(0)); + let original = buffer.text(); + + let large_text: Arc = "Z".repeat(50).into(); + let edits = vec![(5..6, large_text)]; + buffer.edit(edits); + assert_ne!(buffer.text(), original); + buffer.check_invariants(); + + // Undo should restore the original text even though the edit was split + // into multiple internal operations grouped in one transaction. 
+ buffer.undo(); + assert_eq!(buffer.text(), original); + buffer.check_invariants(); +} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 2c51a0d5e5b29bc08fdacc6b8b90edd8f65cd83d..a5bdbe443bbaa4723c8d3104bfed28e4c2fe8fdb 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -48,6 +48,12 @@ use util::RandomCharIter; static LINE_SEPARATORS_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); +/// The maximum length of a single insertion operation. +/// Fragments larger than this will be split into multiple smaller +/// fragments. This allows us to use relative `u32` offsets instead of `usize`, +/// reducing memory usage. +const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize }; + pub type TransactionId = clock::Lamport; pub struct Buffer { @@ -155,18 +161,38 @@ struct History { #[derive(Clone, Debug, Eq, PartialEq)] struct InsertionSlice { - edit_id: clock::Lamport, - insertion_id: clock::Lamport, - range: Range, + // Inline the lamports to allow the replica ids to share the same alignment + // saving 4 bytes space edit_id: clock::Lamport, + edit_id_value: clock::Seq, + edit_id_replica_id: ReplicaId, + // insertion_id: clock::Lamport, + insertion_id_value: clock::Seq, + insertion_id_replica_id: ReplicaId, + range: Range, } impl Ord for InsertionSlice { fn cmp(&self, other: &Self) -> Ordering { - self.edit_id - .cmp(&other.edit_id) - .then_with(|| self.insertion_id.cmp(&other.insertion_id)) - .then_with(|| self.range.start.cmp(&other.range.start)) - .then_with(|| self.range.end.cmp(&other.range.end)) + Lamport { + value: self.edit_id_value, + replica_id: self.edit_id_replica_id, + } + .cmp(&Lamport { + value: other.edit_id_value, + replica_id: other.edit_id_replica_id, + }) + .then_with(|| { + Lamport { + value: self.insertion_id_value, + replica_id: self.insertion_id_replica_id, + } + .cmp(&Lamport { + value: other.insertion_id_value, + replica_id: 
other.insertion_id_replica_id, + }) + }) + .then_with(|| self.range.start.cmp(&other.range.start)) + .then_with(|| self.range.end.cmp(&other.range.end)) } } @@ -179,8 +205,10 @@ impl PartialOrd for InsertionSlice { impl InsertionSlice { fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self { Self { - edit_id, - insertion_id: fragment.timestamp, + edit_id_value: edit_id.value, + edit_id_replica_id: edit_id.replica_id, + insertion_id_value: fragment.timestamp.value, + insertion_id_replica_id: fragment.timestamp.replica_id, range: fragment.insertion_offset..fragment.insertion_offset + fragment.len, } } @@ -309,6 +337,7 @@ impl History { fn finalize_last_transaction(&mut self) -> Option<&Transaction> { self.undo_stack.last_mut().map(|entry| { + entry.transaction.edit_ids.shrink_to_fit(); entry.suppress_grouping = true; &entry.transaction }) @@ -489,7 +518,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> { since: &'a clock::Global, old_end: D, new_end: D, - range: Range<(&'a Locator, usize)>, + range: Range<(&'a Locator, u32)>, buffer_id: BufferId, } @@ -536,18 +565,18 @@ impl Edit<(D1, D2)> { } #[derive(Eq, PartialEq, Clone, Debug)] -pub struct Fragment { - pub id: Locator, - pub timestamp: clock::Lamport, - pub insertion_offset: usize, - pub len: usize, - pub visible: bool, - pub deletions: SmallVec<[clock::Lamport; 2]>, - pub max_undos: clock::Global, +struct Fragment { + id: Locator, + timestamp: clock::Lamport, + insertion_offset: u32, + len: u32, + visible: bool, + deletions: SmallVec<[clock::Lamport; 2]>, + max_undos: clock::Global, } #[derive(Eq, PartialEq, Clone, Debug)] -pub struct FragmentSummary { +struct FragmentSummary { text: FragmentTextSummary, max_id: Locator, max_version: clock::Global, @@ -575,14 +604,14 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { #[derive(Eq, PartialEq, Clone, Debug)] struct InsertionFragment { timestamp: clock::Lamport, - split_offset: usize, + 
split_offset: u32, fragment_id: Locator, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] struct InsertionFragmentKey { timestamp: clock::Lamport, - split_offset: usize, + split_offset: u32, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -740,18 +769,37 @@ impl Buffer { let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL); lamport_clock.observe(insertion_timestamp); version.observe(insertion_timestamp); - let fragment_id = Locator::between(&Locator::min(), &Locator::max()); - let fragment = Fragment { - id: fragment_id, - timestamp: insertion_timestamp, - insertion_offset: 0, - len: visible_text.len(), - visible: true, - deletions: Default::default(), - max_undos: Default::default(), - }; - insertions.push(InsertionFragment::new(&fragment), ()); - fragments.push(fragment, &None); + + let mut insertion_offset: u32 = 0; + let mut text_offset: usize = 0; + let mut prev_locator = Locator::min(); + + while text_offset < visible_text.len() { + let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN); + let chunk_end = if target_end == visible_text.len() { + target_end + } else { + visible_text.floor_char_boundary(target_end) + }; + let chunk_len = chunk_end - text_offset; + + let fragment_id = Locator::between(&prev_locator, &Locator::max()); + let fragment = Fragment { + id: fragment_id.clone(), + timestamp: insertion_timestamp, + insertion_offset, + len: chunk_len as u32, + visible: true, + deletions: Default::default(), + max_undos: Default::default(), + }; + insertions.push(InsertionFragment::new(&fragment), ()); + fragments.push(fragment, &None); + + prev_locator = fragment_id; + insertion_offset += chunk_len as u32; + text_offset = chunk_end; + } } Buffer { @@ -853,7 +901,7 @@ impl Buffer { new_text: Vec::with_capacity(edits.len()), }; let mut new_insertions = Vec::new(); - let mut insertion_offset = 0; + let mut insertion_offset: u32 = 0; let mut insertion_slices = Vec::new(); let mut edits = edits @@ -879,8 +927,9 @@ impl 
Buffer { if fragment_start > old_fragments.start().visible { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - suffix.insertion_offset += fragment_start - old_fragments.start().visible; + suffix.len = (fragment_end - fragment_start) as u32; + suffix.insertion_offset += + (fragment_start - old_fragments.start().visible) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -899,8 +948,8 @@ impl Buffer { // Preserve any portion of the current fragment that precedes this range. if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; - prefix.insertion_offset += fragment_start - old_fragments.start().visible; + prefix.len = (range.start - fragment_start) as u32; + prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32; prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); @@ -912,29 +961,24 @@ impl Buffer { if !new_text.is_empty() { let new_start = new_fragments.summary().text.visible; - let fragment = Fragment { - id: Locator::between( - &new_fragments.summary().max_id, - old_fragments - .item() - .map_or(&Locator::max(), |old_fragment| &old_fragment.id), - ), + let next_fragment_id = old_fragments + .item() + .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id); + Self::push_fragments_for_insertion( + new_text.as_ref(), timestamp, - insertion_offset, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; + &mut insertion_offset, + &mut new_fragments, + &mut new_insertions, + &mut insertion_slices, + &mut new_ropes, + next_fragment_id, + timestamp, + ); edits_patch.push(Edit { old: 
fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); - insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); - new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text.as_ref()); - new_fragments.push(fragment, &None); - insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -945,8 +989,9 @@ impl Buffer { let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { - intersection.len = intersection_end - fragment_start; - intersection.insertion_offset += fragment_start - old_fragments.start().visible; + intersection.len = (intersection_end - fragment_start) as u32; + intersection.insertion_offset += + (fragment_start - old_fragments.start().visible) as u32; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.push(timestamp); @@ -983,8 +1028,8 @@ impl Buffer { let fragment_end = old_fragments.end().visible; if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - suffix.insertion_offset += fragment_start - old_fragments.start().visible; + suffix.len = (fragment_end - fragment_start) as u32; + suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1075,7 +1120,7 @@ impl Buffer { let mut insertion_slices = Vec::new(); let cx = Some(version.clone()); let mut new_insertions = Vec::new(); - let mut insertion_offset = 0; + let mut insertion_offset: u32 = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self @@ -1097,9 +1142,9 @@ impl Buffer { if fragment_start > 
old_fragments.start().0.full_offset() { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end.0 - fragment_start.0; + suffix.len = (fragment_end.0 - fragment_start.0) as u32; suffix.insertion_offset += - fragment_start - old_fragments.start().0.full_offset(); + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1118,8 +1163,9 @@ impl Buffer { let fragment_end = old_fragments.end().0.full_offset(); if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); - fragment.len = fragment_end.0 - fragment_start.0; - fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + fragment.len = (fragment_end.0 - fragment_start.0) as u32; + fragment.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); @@ -1144,8 +1190,9 @@ impl Buffer { // Preserve any portion of the current fragment that precedes this range. 
if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start.0 - fragment_start.0; - prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + prefix.len = (range.start.0 - fragment_start.0) as u32; + prefix.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); fragment_start = range.start; @@ -1160,29 +1207,24 @@ impl Buffer { old_start += fragment_start.0 - old_fragments.start().0.full_offset().0; } let new_start = new_fragments.summary().text.visible; - let fragment = Fragment { - id: Locator::between( - &new_fragments.summary().max_id, - old_fragments - .item() - .map_or(&Locator::max(), |old_fragment| &old_fragment.id), - ), + let next_fragment_id = old_fragments + .item() + .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id); + Self::push_fragments_for_insertion( + new_text, timestamp, - insertion_offset, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; + &mut insertion_offset, + &mut new_fragments, + &mut new_insertions, + &mut insertion_slices, + &mut new_ropes, + next_fragment_id, + timestamp, + ); edits_patch.push(Edit { old: old_start..old_start, new: new_start..new_start + new_text.len(), }); - insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); - new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text); - new_fragments.push(fragment, &None); - insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -1193,9 +1235,9 @@ impl Buffer { let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { - intersection.len = 
intersection_end.0 - fragment_start.0; + intersection.len = (intersection_end.0 - fragment_start.0) as u32; intersection.insertion_offset += - fragment_start - old_fragments.start().0.full_offset(); + (fragment_start - old_fragments.start().0.full_offset()) as u32; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.push(timestamp); @@ -1208,7 +1250,7 @@ impl Buffer { + (fragment_start.0 - old_fragments.start().0.full_offset().0); let new_start = new_fragments.summary().text.visible; edits_patch.push(Edit { - old: old_start..old_start + intersection.len, + old: old_start..old_start + intersection.len as usize, new: new_start..new_start, }); } @@ -1229,8 +1271,9 @@ impl Buffer { let fragment_end = old_fragments.end().0.full_offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end.0 - fragment_start.0; - suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + suffix.len = (fragment_end.0 - fragment_start.0) as u32; + suffix.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1252,6 +1295,49 @@ impl Buffer { self.subscriptions.publish_mut(&edits_patch) } + fn push_fragments_for_insertion( + new_text: &str, + timestamp: clock::Lamport, + insertion_offset: &mut u32, + new_fragments: &mut SumTree, + new_insertions: &mut Vec>, + insertion_slices: &mut Vec, + new_ropes: &mut RopeBuilder, + next_fragment_id: &Locator, + edit_timestamp: clock::Lamport, + ) { + let mut text_offset = 0; + while text_offset < new_text.len() { + let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN); + let chunk_end = if target_end == new_text.len() { + target_end + } else { + new_text.floor_char_boundary(target_end) + }; + if chunk_end == 
text_offset { + break; + } + let chunk_len = chunk_end - text_offset; + + let fragment = Fragment { + id: Locator::between(&new_fragments.summary().max_id, next_fragment_id), + timestamp, + insertion_offset: *insertion_offset, + len: chunk_len as u32, + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment)); + new_insertions.push(InsertionFragment::insert_new(&fragment)); + new_fragments.push(fragment, &None); + + *insertion_offset += chunk_len as u32; + text_offset = chunk_end; + } + new_ropes.push_str(new_text); + } + fn fragment_ids_for_edits<'a>( &'a self, edit_ids: impl Iterator, @@ -1260,38 +1346,56 @@ impl Buffer { let mut insertion_slices = Vec::new(); for edit_id in edit_ids { let insertion_slice = InsertionSlice { - edit_id: *edit_id, - insertion_id: clock::Lamport::MIN, + edit_id_value: edit_id.value, + edit_id_replica_id: edit_id.replica_id, + insertion_id_value: Lamport::MIN.value, + insertion_id_replica_id: Lamport::MIN.replica_id, range: 0..0, }; let slices = self .snapshot .insertion_slices .iter_from(&insertion_slice) - .take_while(|slice| slice.edit_id == *edit_id); + .take_while(|slice| { + Lamport { + value: slice.edit_id_value, + replica_id: slice.edit_id_replica_id, + } == *edit_id + }); insertion_slices.extend(slices) } - insertion_slices - .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end))); + insertion_slices.sort_unstable_by_key(|s| { + ( + Lamport { + value: s.insertion_id_value, + replica_id: s.insertion_id_replica_id, + }, + s.range.start, + Reverse(s.range.end), + ) + }); // Get all of the fragments corresponding to these insertion slices. 
let mut fragment_ids = Vec::new(); let mut insertions_cursor = self.insertions.cursor::(()); for insertion_slice in &insertion_slices { - if insertion_slice.insertion_id != insertions_cursor.start().timestamp + let insertion_id = Lamport { + value: insertion_slice.insertion_id_value, + replica_id: insertion_slice.insertion_id_replica_id, + }; + if insertion_id != insertions_cursor.start().timestamp || insertion_slice.range.start > insertions_cursor.start().split_offset { insertions_cursor.seek_forward( &InsertionFragmentKey { - timestamp: insertion_slice.insertion_id, + timestamp: insertion_id, split_offset: insertion_slice.range.start, }, Bias::Left, ); } while let Some(item) = insertions_cursor.item() { - if item.timestamp != insertion_slice.insertion_id - || item.split_offset >= insertion_slice.range.end + if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end { break; } @@ -1330,13 +1434,13 @@ impl Buffer { let new_start = new_fragments.summary().text.visible; if fragment_was_visible && !fragment.visible { edits.push(Edit { - old: old_start..old_start + fragment.len, + old: old_start..old_start + fragment.len as usize, new: new_start..new_start, }); } else if !fragment_was_visible && fragment.visible { edits.push(Edit { old: old_start..old_start, - new: new_start..new_start + fragment.len, + new: new_start..new_start + fragment.len as usize, }); } new_ropes.push_fragment(&fragment, fragment_was_visible); @@ -1582,7 +1686,12 @@ impl Buffer { cursor.seek_forward(&Some(fragment_id), Bias::Left); let fragment = cursor.item()?; let start_offset = cursor.start().1; - let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 }; + let end_offset = start_offset + + if fragment.visible { + fragment.len as usize + } else { + 0 + }; Some(start_offset..end_offset) }); @@ -2038,10 +2147,6 @@ impl BufferSnapshot { self.deleted_text.to_string() } - pub fn fragments(&self) -> impl Iterator { - self.fragments.iter() - } - pub fn 
text_summary(&self) -> TextSummary { self.visible_text.summary() } @@ -2287,7 +2392,7 @@ impl BufferSnapshot { let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { - fragment_offset += anchor.offset - insertion.split_offset; + fragment_offset += (anchor.offset - insertion.split_offset) as usize; } position.add_assign(&text_cursor.summary(fragment_offset)); @@ -2332,7 +2437,7 @@ impl BufferSnapshot { let fragment = item.unwrap(); let mut fragment_offset = start.1; if fragment.visible { - fragment_offset += anchor.offset - insertion.split_offset; + fragment_offset += (anchor.offset - insertion.split_offset) as usize; } fragment_offset } @@ -2457,7 +2562,7 @@ impl BufferSnapshot { let overshoot = offset - start; Anchor::new( fragment.timestamp, - fragment.insertion_offset + overshoot, + fragment.insertion_offset + overshoot as u32, bias, Some(self.remote_id), ) @@ -2546,7 +2651,7 @@ impl BufferSnapshot { let mut visible_start = start.1.visible; let mut deleted_start = start.1.deleted; if let Some(fragment) = item { - let overshoot = range.start.offset - fragment.insertion_offset; + let overshoot = (range.start.offset - fragment.insertion_offset) as usize; if fragment.visible { visible_start += overshoot; } else { @@ -2706,7 +2811,7 @@ impl<'a> RopeBuilder<'a> { fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) { debug_assert!(fragment.len > 0); - self.push(fragment.len, was_visible, fragment.visible) + self.push(fragment.len as usize, was_visible, fragment.visible) } fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) { @@ -2781,7 +2886,8 @@ impl bool> Iterator for Ed if fragment.id == *self.range.end.0 { visible_end = cmp::min( visible_end, - cursor.start().visible + (self.range.end.1 - fragment.insertion_offset), + cursor.start().visible + + (self.range.end.1 - fragment.insertion_offset) as usize, ); } @@ -2807,7 +2913,8 @@ impl bool> Iterator for Ed if 
fragment.id == *self.range.end.0 { deleted_end = cmp::min( deleted_end, - cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset), + cursor.start().deleted + + (self.range.end.1 - fragment.insertion_offset) as usize, ); } @@ -2872,7 +2979,7 @@ impl sum_tree::Item for Fragment { FragmentSummary { max_id: self.id.clone(), text: FragmentTextSummary { - visible: self.len, + visible: self.len as usize, deleted: 0, }, max_version, @@ -2884,7 +2991,7 @@ impl sum_tree::Item for Fragment { max_id: self.id.clone(), text: FragmentTextSummary { visible: 0, - deleted: self.len, + deleted: self.len as usize, }, max_version, min_insertion_version, diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index cc5bdd1f32dc334e0a4a7974c2ca8d263b27adb8..fce0e54c720bf2c21ded9bd8240498053a651f6d 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -175,6 +175,7 @@ impl ThemeColors { vim_visual_background: system.transparent, vim_visual_line_background: system.transparent, vim_visual_block_background: system.transparent, + vim_yank_background: neutral().light_alpha().step_3(), vim_helix_normal_background: system.transparent, vim_helix_select_background: system.transparent, vim_normal_foreground: system.transparent, @@ -320,6 +321,7 @@ impl ThemeColors { vim_visual_background: system.transparent, vim_visual_line_background: system.transparent, vim_visual_block_background: system.transparent, + vim_yank_background: neutral().dark_alpha().step_4(), vim_helix_normal_background: system.transparent, vim_helix_select_background: system.transparent, vim_normal_foreground: system.transparent, diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index b04d676774626bf708e2ef58dca6ff6b1b87d2b0..72b65f85c9ecb2776fc6066c8b926cfa4bd42929 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -257,6 +257,7 @@ pub(crate) fn zed_default_dark() -> 
Theme { vim_visual_background: SystemColors::default().transparent, vim_visual_line_background: SystemColors::default().transparent, vim_visual_block_background: SystemColors::default().transparent, + vim_yank_background: hsla(207.8 / 360., 81. / 100., 66. / 100., 0.2), vim_helix_normal_background: SystemColors::default().transparent, vim_helix_select_background: SystemColors::default().transparent, vim_normal_foreground: SystemColors::default().transparent, diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 8415462595cb93a19365a929660b4e8e3f78f8d8..121ff9d7d4fbd841315b89e631606c7e67bc5cde 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -66,7 +66,7 @@ pub struct IconDefinition { } const FILE_STEMS_BY_ICON_KEY: &[(&str, &[&str])] = &[ - ("docker", &["Dockerfile"]), + ("docker", &["Containerfile", "Dockerfile"]), ("ruby", &["Podfile"]), ("heroku", &["Procfile"]), ]; @@ -89,7 +89,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ( "cpp", &[ - "c++", "h++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx", + "c++", "h++", "cc", "cpp", "cppm", "cxx", "hh", "hpp", "hxx", "inl", "ixx", ], ), ("crystal", &["cr", "ecr"]), @@ -99,6 +99,15 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("cue", &["cue"]), ("dart", &["dart"]), ("diff", &["diff"]), + ( + "docker", + &[ + "docker-compose.yml", + "docker-compose.yaml", + "compose.yml", + "compose.yaml", + ], + ), ( "document", &[ @@ -138,12 +147,27 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("font", &["otf", "ttf", "woff", "woff2"]), ("fsharp", &["fs"]), ("fsproj", &["fsproj"]), - ("gitlab", &["gitlab-ci.yml"]), + ("gitlab", &["gitlab-ci.yml", "gitlab-ci.yaml"]), ("gleam", &["gleam"]), ("go", &["go", "mod", "work"]), ("graphql", &["gql", "graphql", "graphqls"]), ("haskell", &["hs"]), ("hcl", &["hcl"]), + ( + "helm", + &[ + "helmfile.yaml", + "helmfile.yml", + "Chart.yaml", + "Chart.yml", + "Chart.lock", + 
"values.yaml", + "values.yml", + "requirements.yaml", + "requirements.yml", + "tpl", + ], + ), ("html", &["htm", "html"]), ( "image", @@ -198,7 +222,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("rust", &["rs"]), ("sass", &["sass", "scss"]), ("scala", &["scala", "sc"]), - ("settings", &["conf", "ini", "yaml", "yml"]), + ("settings", &["conf", "ini"]), ("solidity", &["sol"]), ( "storage", @@ -279,6 +303,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("vue", &["vue"]), ("vyper", &["vy", "vyi"]), ("wgsl", &["wgsl"]), + ("yaml", &["yaml", "yml"]), ("zig", &["zig"]), ]; @@ -310,12 +335,13 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("font", "icons/file_icons/font.svg"), ("fsharp", "icons/file_icons/fsharp.svg"), ("fsproj", "icons/file_icons/file.svg"), - ("gitlab", "icons/file_icons/settings.svg"), + ("gitlab", "icons/file_icons/gitlab.svg"), ("gleam", "icons/file_icons/gleam.svg"), ("go", "icons/file_icons/go.svg"), ("graphql", "icons/file_icons/graphql.svg"), ("haskell", "icons/file_icons/haskell.svg"), ("hcl", "icons/file_icons/hcl.svg"), + ("helm", "icons/file_icons/helm.svg"), ("heroku", "icons/file_icons/heroku.svg"), ("html", "icons/file_icons/html.svg"), ("image", "icons/file_icons/image.svg"), @@ -371,6 +397,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("vue", "icons/file_icons/vue.svg"), ("vyper", "icons/file_icons/vyper.svg"), ("wgsl", "icons/file_icons/wgsl.svg"), + ("yaml", "icons/file_icons/yaml.svg"), ("zig", "icons/file_icons/zig.svg"), ]; diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index afe7bf759ddf817a686e21b929345040a61cff7e..61cf869b951ac4d285e1eaca42e226a6ac3e4a6a 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -796,6 +796,11 @@ pub fn theme_colors_refinement( .vim_visual_block_background .as_ref() .and_then(|color| try_parse_color(color).ok()), + vim_yank_background: this + .vim_yank_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + 
.or(editor_document_highlight_read_background), vim_helix_normal_background: this .vim_helix_normal_background .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index b54dc38fdf80665977120cf80c5a15b12b0021a6..75ba8ea391848a5a899f5ff64319a77b5f3d6ca9 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -175,6 +175,8 @@ pub struct ThemeColors { pub vim_visual_line_background: Hsla, /// Background color for Vim Visual Block mode indicator. pub vim_visual_block_background: Hsla, + /// Background color for Vim yank highlight. + pub vim_yank_background: Hsla, /// Background color for Vim Helix Normal mode indicator. pub vim_helix_normal_background: Hsla, /// Background color for Vim Helix Select mode indicator. diff --git a/crates/theme_importer/src/vscode/converter.rs b/crates/theme_importer/src/vscode/converter.rs index 2df113bfe803fd3e60d9238f7127fd84a5ef05f7..b052e865265368234d7a1bed42957a714ca9d5bb 100644 --- a/crates/theme_importer/src/vscode/converter.rs +++ b/crates/theme_importer/src/vscode/converter.rs @@ -207,6 +207,7 @@ impl VsCodeThemeConverter { terminal_ansi_white: vscode_colors.terminal.ansi_white.clone(), terminal_ansi_bright_white: vscode_colors.terminal.ansi_bright_white.clone(), link_text_hover: vscode_colors.text_link.active_foreground.clone(), + vim_yank_background: vscode_colors.editor.range_highlight_background.clone(), ..Default::default() }) } diff --git a/crates/time_format/Cargo.toml b/crates/time_format/Cargo.toml index b598d19887e128a0c5951c1d1bd5ec42f27f975b..7f5f2d9f1b56666036816d43bfa3564bf9721f05 100644 --- a/crates/time_format/Cargo.toml +++ b/crates/time_format/Cargo.toml @@ -19,3 +19,6 @@ time.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true core-foundation-sys.workspace = true + +[target.'cfg(target_os = "windows")'.dependencies] +windows.workspace = true diff --git a/crates/time_format/src/time_format.rs 
b/crates/time_format/src/time_format.rs index 25a7ae84232b69570e8c800c5955e684a13dc08a..bbf214623eb4b5b9dd978a675551c25f5e937a8d 100644 --- a/crates/time_format/src/time_format.rs +++ b/crates/time_format/src/time_format.rs @@ -86,10 +86,25 @@ fn format_absolute_date( macos::format_date(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_date_formatting { + return windows::format_date(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); format_timestamp_naive_date( @@ -105,10 +120,13 @@ fn format_absolute_time(timestamp: OffsetDateTime) -> String { { macos::format_time(×tamp) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + windows::format_time(×tamp) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); format_timestamp_naive_time( @@ -123,7 +141,7 @@ fn format_absolute_timestamp( reference: OffsetDateTime, #[allow(unused_variables)] enhanced_date_formatting: bool, ) -> String { - #[cfg(target_os = "macos")] + #[cfg(any(target_os = "macos", target_os = "windows"))] { if !enhanced_date_formatting { return format!( @@ -147,10 +165,9 @@ fn format_absolute_timestamp( ) } } - #[cfg(not(target_os = "macos"))] + 
#[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences format_timestamp_fallback(timestamp, reference) } } @@ -176,10 +193,25 @@ fn format_absolute_date_medium( macos::format_date_medium(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_formatting { + return windows::format_date_medium(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date_medium(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); if !enhanced_formatting { @@ -212,7 +244,11 @@ fn format_absolute_timestamp_medium( { format_absolute_date_medium(timestamp, reference, false) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + format_absolute_date_medium(timestamp, reference, false) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences // todo(windows) respect user's date/time preferences @@ -360,7 +396,7 @@ fn format_timestamp_naive_date( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_naive_date_medium( timestamp_local: OffsetDateTime, is_12_hour_time: bool, @@ -415,10 +451,10 @@ pub fn format_timestamp_naive( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] static CURRENT_LOCALE: std::sync::OnceLock = std::sync::OnceLock::new(); -#[cfg(not(target_os = 
"macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTime) -> String { let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); @@ -428,7 +464,7 @@ fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTim } /// Returns `true` if the locale is recognized as a 12-hour time locale. -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn is_12_hour_time_by_locale(locale: &str) -> bool { [ "es-MX", "es-CO", "es-SV", "es-NI", @@ -522,6 +558,57 @@ mod macos { } } +#[cfg(target_os = "windows")] +mod windows { + use windows::Globalization::DateTimeFormatting::DateTimeFormatter; + + pub fn format_time(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortTime(), timestamp, true) + } + + pub fn format_date(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortDate(), timestamp, false) + } + + pub fn format_date_medium(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter( + DateTimeFormatter::CreateDateTimeFormatter(windows::core::h!( + "month.abbreviated day year.full" + )), + timestamp, + false, + ) + } + + fn format_with_formatter( + formatter: windows::core::Result, + timestamp: &time::OffsetDateTime, + is_time: bool, + ) -> String { + formatter + .and_then(|formatter| formatter.Format(to_winrt_datetime(timestamp))) + .map(|hstring| hstring.to_string()) + .unwrap_or_else(|_| { + if is_time { + super::format_timestamp_naive_time(*timestamp, true) + } else { + super::format_timestamp_naive_date(*timestamp, *timestamp, true) + } + }) + } + + fn to_winrt_datetime(timestamp: &time::OffsetDateTime) -> windows::Foundation::DateTime { + // DateTime uses 100-nanosecond intervals since January 1, 1601 (UTC). 
+ const WINDOWS_EPOCH: time::OffsetDateTime = time::macros::datetime!(1601-01-01 0:00 UTC); + let duration_since_winrt_epoch = *timestamp - WINDOWS_EPOCH; + let universal_time = duration_since_winrt_epoch.whole_nanoseconds() / 100; + + windows::Foundation::DateTime { + UniversalTime: universal_time as i64, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index b52bde3fdb0b0230c271cc99210fdd13e7eac7cc..f00a71a305e306ba9201e5a4976382012ae0059e 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -31,7 +31,9 @@ use gpui::{ StatefulInteractiveElement, Styled, Subscription, WeakEntity, Window, actions, div, }; use onboarding_banner::OnboardingBanner; -use project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees}; +use project::{ + DisableAiSettings, Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees, +}; use remote::RemoteConnectionOptions; use settings::Settings; use settings::WorktreeId; @@ -686,7 +688,7 @@ impl TitleBar { _window: &mut Window, cx: &mut Context, ) -> Option { - if !cx.has_flag::() { + if !cx.has_flag::() || DisableAiSettings::get_global(cx).disable_ai { return None; } diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 6cc710690ea0103bf2de4253bc405eb52be5af69..52d91e09824077738bde6be75122b0bf7b9e3d52 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -3,7 +3,7 @@ use crate::{ prelude::*, }; -use gpui::{AnyView, ClickEvent, SharedString}; +use gpui::{AnyView, ClickEvent, Hsla, SharedString}; #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub enum AgentThreadStatus { @@ -18,10 +18,10 @@ pub enum AgentThreadStatus { pub struct ThreadItem { id: ElementId, icon: IconName, + custom_icon_from_external_svg: Option, title: SharedString, timestamp: SharedString, - running: bool, - 
generation_done: bool, + notified: bool, status: AgentThreadStatus, selected: bool, hovered: bool, @@ -41,10 +41,10 @@ impl ThreadItem { Self { id: id.into(), icon: IconName::ZedAgent, + custom_icon_from_external_svg: None, title: title.into(), timestamp: "".into(), - running: false, - generation_done: false, + notified: false, status: AgentThreadStatus::default(), selected: false, hovered: false, @@ -70,13 +70,13 @@ impl ThreadItem { self } - pub fn running(mut self, running: bool) -> Self { - self.running = running; + pub fn custom_icon_from_external_svg(mut self, svg: impl Into) -> Self { + self.custom_icon_from_external_svg = Some(svg.into()); self } - pub fn generation_done(mut self, generation_done: bool) -> Self { - self.generation_done = generation_done; + pub fn notified(mut self, notified: bool) -> Self { + self.notified = notified; self } @@ -155,49 +155,34 @@ impl RenderOnce for ThreadItem { // }; let icon_container = || h_flex().size_4().justify_center(); - let agent_icon = Icon::new(self.icon) - .color(Color::Muted) - .size(IconSize::Small); + let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg { + Icon::from_external_svg(custom_svg) + .color(Color::Muted) + .size(IconSize::Small) + } else { + Icon::new(self.icon) + .color(Color::Muted) + .size(IconSize::Small) + }; - let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation { - Some( - IconDecoration::new( - IconDecorationKind::Triangle, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().warning) + let decoration = |icon: IconDecorationKind, color: Hsla| { + IconDecoration::new(icon, cx.theme().colors().surface_background, cx) + .color(color) .position(gpui::Point { x: px(-2.), y: px(-2.), - }), - ) + }) + }; + + let decoration = if self.status == AgentThreadStatus::WaitingForConfirmation { + Some(decoration( + IconDecorationKind::Triangle, + cx.theme().status().warning, + )) } else if self.status == AgentThreadStatus::Error { 
- Some( - IconDecoration::new( - IconDecorationKind::X, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().status().error) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), - ) - } else if self.generation_done { - Some( - IconDecoration::new( - IconDecorationKind::Dot, - cx.theme().colors().surface_background, - cx, - ) - .color(cx.theme().colors().text_accent) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }), - ) + Some(decoration(IconDecorationKind::X, cx.theme().status().error)) + } else if self.notified { + Some(decoration(IconDecorationKind::Dot, clr.text_accent)) } else { None }; @@ -208,9 +193,11 @@ impl RenderOnce for ThreadItem { icon_container().child(agent_icon) }; - let running_or_action = self.running || (self.hovered && self.action_slot.is_some()); - - // let has_no_changes = self.added.is_none() && self.removed.is_none(); + let is_running = matches!( + self.status, + AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation + ); + let running_or_action = is_running || (self.hovered && self.action_slot.is_some()); let title = self.title; let highlight_positions = self.highlight_positions; @@ -225,6 +212,7 @@ impl RenderOnce for ThreadItem { v_flex() .id(self.id.clone()) .cursor_pointer() + .w_full() .map(|this| { if self.worktree.is_some() { this.p_2() @@ -255,7 +243,7 @@ impl RenderOnce for ThreadItem { this.child( h_flex() .gap_1() - .when(self.running, |this| { + .when(is_running, |this| { this.child( icon_container() .child(SpinnerLabel::new().color(Color::Accent)), @@ -347,12 +335,12 @@ impl Component for ThreadItem { .into_any_element(), ), single_example( - "Generation Done", + "Notified", container() .child( ThreadItem::new("ti-2", "Refine thread view scrolling behavior") .timestamp("12:12 AM") - .generation_done(true), + .notified(true), ) .into_any_element(), ), @@ -383,7 +371,7 @@ impl Component for ThreadItem { ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock") 
.icon(IconName::AiClaude) .timestamp("7:30 PM") - .running(true), + .status(AgentThreadStatus::Running), ) .into_any_element(), ), diff --git a/crates/ui/src/components/callout.rs b/crates/ui/src/components/callout.rs index 24762ec1765a58259b061194ea31ed7e8721c2a0..23c820cd545adff2985a4116a6efb00c1e731693 100644 --- a/crates/ui/src/components/callout.rs +++ b/crates/ui/src/components/callout.rs @@ -295,7 +295,7 @@ impl Component for Callout { "Error details:", "• Quota exceeded for metric", "• Limit: 0", - "• Model: gemini-3-pro", + "• Model: gemini-3.1-pro", "Please retry in 26.33s.", "Additional details:", "- Request ID: abc123def456", diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 8a40c246ca44ea9dbb25e61bb611882343ba7f94..76ed64850c92e274bd8aeca483dd197cfbccbf52 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -36,6 +36,13 @@ pub mod table_row { pub struct TableRow(Vec); impl TableRow { + pub fn from_element(element: T, length: usize) -> Self + where + T: Clone, + { + Self::from_vec(vec![element; length], length) + } + /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. /// /// Use this when you want to ensure at construction time that the row has the correct number of columns. @@ -70,7 +77,8 @@ pub mod table_row { /// /// # Panics /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). 
- pub fn expect_get(&self, col: usize) -> &T { + pub fn expect_get(&self, col: impl Into) -> &T { + let col = col.into(); self.0.get(col).unwrap_or_else(|| { panic!( "Expected table row of `{}` to have {col:?}", @@ -79,8 +87,8 @@ pub mod table_row { }) } - pub fn get(&self, col: usize) -> Option<&T> { - self.0.get(col) + pub fn get(&self, col: impl Into) -> Option<&T> { + self.0.get(col.into()) } pub fn as_slice(&self) -> &[T] { @@ -735,6 +743,7 @@ pub struct Table { empty_table_callback: Option AnyElement>>, /// The number of columns in the table. Used to assert column numbers in `TableRow` collections cols: usize, + disable_base_cell_style: bool, } impl Table { @@ -753,9 +762,19 @@ impl Table { use_ui_font: true, empty_table_callback: None, col_widths: None, + disable_base_cell_style: false, } } + /// Disables based styling of row cell (paddings, text ellipsis, nowrap, etc), keeping width settings + /// + /// Doesn't affect base style of header cell. + /// Doesn't remove overflow-hidden + pub fn disable_base_style(mut self) -> Self { + self.disable_base_cell_style = true; + self + } + /// Enables uniform list rendering. /// The provided function will be passed directly to the `uniform_list` element. 
/// Therefore, if this method is called, any calls to [`Table::row`] before or after @@ -973,10 +992,18 @@ pub fn render_table_row( .into_iter() .zip(column_widths.into_vec()) .map(|(cell, width)| { - base_cell_style_text(width, table_context.use_ui_font, cx) - .px_1() - .py_0p5() - .child(cell) + if table_context.disable_base_cell_style { + div() + .when_some(width, |this, width| this.w(width)) + .when(width.is_none(), |this| this.flex_1()) + .overflow_hidden() + .child(cell) + } else { + base_cell_style_text(width, table_context.use_ui_font, cx) + .px_1() + .py_0p5() + .child(cell) + } }), ); @@ -1071,6 +1098,7 @@ pub struct TableRenderContext { pub column_widths: Option>, pub map_row: Option), &mut Window, &mut App) -> AnyElement>>, pub use_ui_font: bool, + pub disable_base_cell_style: bool, } impl TableRenderContext { @@ -1083,6 +1111,7 @@ impl TableRenderContext { column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)), map_row: table.map_row.clone(), use_ui_font: table.use_ui_font, + disable_base_cell_style: table.disable_base_cell_style, } } } diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 8e8e89be9c0580a7820685b5690a996dfd2dade0..21d6aa46d0f90a0d48e267e935b00d9f263a30c5 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -9,8 +9,8 @@ use gpui::{ Along, App, AppContext as _, Axis as ScrollbarAxis, BorderStyle, Bounds, ContentMask, Context, Corner, Corners, CursorStyle, DispatchPhase, Div, Edges, Element, ElementId, Entity, EntityId, GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, - LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Negate, - ParentElement, Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful, + LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement, + Pixels, Point, Position, Render, ScrollHandle, 
ScrollWheelEvent, Size, Stateful, StatefulInteractiveElement, Style, Styled, Task, UniformListDecoration, UniformListScrollHandle, Window, ease_in_out, prelude::FluentBuilder as _, px, quad, relative, size, @@ -258,7 +258,7 @@ impl UniformListDecoration for ScrollbarStateWrapper { _cx: &mut App, ) -> gpui::AnyElement { ScrollbarElement { - origin: scroll_offset.negate(), + origin: -scroll_offset, state: self.0.clone(), } .into_any() @@ -911,7 +911,7 @@ impl ThumbState { } impl ScrollableHandle for UniformListScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.0.borrow().base_handle.max_offset() } @@ -929,7 +929,7 @@ impl ScrollableHandle for UniformListScrollHandle { } impl ScrollableHandle for ListState { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset_for_scrollbar() } @@ -955,7 +955,7 @@ impl ScrollableHandle for ListState { } impl ScrollableHandle for ScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset() } @@ -973,7 +973,7 @@ impl ScrollableHandle for ScrollHandle { } pub trait ScrollableHandle: 'static + Any + Sized + Clone { - fn max_offset(&self) -> Size; + fn max_offset(&self) -> Point; fn set_offset(&self, point: Point); fn offset(&self) -> Point; fn viewport(&self) -> Bounds; @@ -984,7 +984,7 @@ pub trait ScrollableHandle: 'static + Any + Sized + Clone { self.max_offset().along(axis) > Pixels::ZERO } fn content_size(&self) -> Size { - self.viewport().size + self.max_offset() + self.viewport().size + self.max_offset().into() } } @@ -1006,7 +1006,7 @@ impl ScrollbarLayout { fn compute_click_offset( &self, event_position: Point, - max_offset: Size, + max_offset: Point, event_type: ScrollbarMouseEvent, ) -> Pixels { let Self { diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 55997b25344d69e090581d46008d9983bc895bca..6a9b30d463af2d9407e8f4c9e3a81133a87c1bce 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ 
-19,14 +19,11 @@ test-support = ["git2", "rand", "util_macros"] [dependencies] anyhow.workspace = true -async-fs.workspace = true async_zip.workspace = true collections.workspace = true -dirs.workspace = true dunce = "1.0" futures-lite.workspace = true futures.workspace = true -git2 = { workspace = true, optional = true } globset.workspace = true itertools.workspace = true log.workspace = true @@ -38,15 +35,21 @@ serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true shlex.workspace = true -smol.workspace = true take-until.workspace = true tempfile.workspace = true unicase.workspace = true url.workspace = true percent-encoding.workspace = true util_macros = { workspace = true, optional = true } -walkdir.workspace = true +gpui_util.workspace = true + +[target.'cfg(not(target_family = "wasm"))'.dependencies] +smol.workspace = true which.workspace = true +git2 = { workspace = true, optional = true } +async-fs.workspace = true +walkdir.workspace = true +dirs.workspace = true [target.'cfg(unix)'.dependencies] command-fds = "0.3.1" diff --git a/crates/util/src/archive.rs b/crates/util/src/archive.rs index 99ff0254929825292d9f4c56806e7ad8177baa9a..7fe43a25c37afa0fa2487374c89a13945e54fc6c 100644 --- a/crates/util/src/archive.rs +++ b/crates/util/src/archive.rs @@ -6,6 +6,7 @@ use async_zip::base::read; use futures::AsyncSeek; use futures::{AsyncRead, io::BufReader}; +#[cfg(any(unix, windows))] fn archive_path_is_normal(filename: &str) -> bool { Path::new(filename).components().all(|c| { matches!( @@ -64,7 +65,7 @@ pub async fn extract_zip(destination: &Path, reader: R) -> Ok(()) } -#[cfg(not(windows))] +#[cfg(unix)] pub async fn extract_zip(destination: &Path, reader: R) -> Result<()> { // Unix needs file permissions copied when extracting. // This is only possible to do when a reader impls `AsyncSeek` and `seek::ZipFileReader` is used. 
@@ -81,7 +82,7 @@ pub async fn extract_zip(destination: &Path, reader: R) -> extract_seekable_zip(destination, file).await } -#[cfg(not(windows))] +#[cfg(unix)] pub async fn extract_seekable_zip( destination: &Path, reader: R, diff --git a/crates/workspace/src/path_list.rs b/crates/util/src/path_list.rs similarity index 91% rename from crates/workspace/src/path_list.rs rename to crates/util/src/path_list.rs index 035f9e44fcce46527faa0c1053b7a6bb09aae0c8..7d605c7924a7d9c25a89634ca7339a457fb99ae4 100644 --- a/crates/workspace/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -3,8 +3,9 @@ use std::{ sync::Arc, }; +use crate::paths::SanitizedPath; use itertools::Itertools; -use util::paths::SanitizedPath; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; /// A list of absolute paths, in a specific order. /// @@ -12,7 +13,7 @@ use util::paths::SanitizedPath; /// other path lists without regard to the order of the paths. /// /// The paths can be retrieved in the original order using `ordered_paths()`. -#[derive(Default, PartialEq, Eq, Debug, Clone)] +#[derive(Default, PartialEq, Eq, Hash, Debug, Clone)] pub struct PathList { /// The paths, in lexicographic order. 
paths: Arc<[PathBuf]>, @@ -118,6 +119,19 @@ impl PathList { } } +impl Serialize for PathList { + fn serialize(&self, serializer: S) -> Result { + self.paths.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for PathList { + fn deserialize>(deserializer: D) -> Result { + let paths: Vec = Vec::deserialize(deserializer)?; + Ok(PathList::new(&paths)) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 5cc1a0c8ebcfcf7120ef59f9305c4f4622751143..39b4064a1bd9d3c4c240abf9665b17151066e9ef 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -1,4 +1,3 @@ -use anyhow::Context; use globset::{GlobBuilder, GlobSet, GlobSetBuilder}; use itertools::Itertools; use regex::Regex; @@ -9,20 +8,19 @@ use std::error::Error; use std::fmt::{Display, Formatter}; use std::mem; use std::path::StripPrefixError; -use std::sync::{Arc, OnceLock}; +use std::sync::Arc; use std::{ ffi::OsStr, path::{Path, PathBuf}, sync::LazyLock, }; +use crate::rel_path::RelPath; use crate::rel_path::RelPathBuf; -use crate::{rel_path::RelPath, shell::ShellKind}; - -static HOME_DIR: OnceLock = OnceLock::new(); /// Returns the path to the user's home directory. pub fn home_dir() -> &'static PathBuf { + static HOME_DIR: std::sync::OnceLock = std::sync::OnceLock::new(); HOME_DIR.get_or_init(|| { if cfg!(any(test, feature = "test-support")) { if cfg!(target_os = "macos") { @@ -56,6 +54,13 @@ pub trait PathExt { where Self: From<&'a Path>, { + #[cfg(target_family = "wasm")] + { + std::str::from_utf8(bytes) + .map(Path::new) + .map(Into::into) + .map_err(Into::into) + } #[cfg(unix)] { use std::os::unix::prelude::OsStrExt; @@ -63,6 +68,7 @@ pub trait PathExt { } #[cfg(windows)] { + use anyhow::Context; use tendril::fmt::{Format, WTF8}; WTF8::validate(bytes) .then(|| { @@ -86,11 +92,17 @@ pub trait PathExt { fn multiple_extensions(&self) -> Option; /// Try to make a shell-safe representation of the path. 
- fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result; + #[cfg(not(target_family = "wasm"))] + fn try_shell_safe(&self, shell_kind: crate::shell::ShellKind) -> anyhow::Result; } impl> PathExt for T { fn compact(&self) -> PathBuf { + #[cfg(target_family = "wasm")] + { + self.as_ref().to_path_buf() + } + #[cfg(not(target_family = "wasm"))] if cfg!(any(target_os = "linux", target_os = "freebsd")) || cfg!(target_os = "macos") { match self.as_ref().strip_prefix(home_dir().as_path()) { Ok(relative_path) => { @@ -164,7 +176,9 @@ impl> PathExt for T { Some(parts.into_iter().join(".")) } - fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result { + #[cfg(not(target_family = "wasm"))] + fn try_shell_safe(&self, shell_kind: crate::shell::ShellKind) -> anyhow::Result { + use anyhow::Context; let path_str = self .as_ref() .to_str() diff --git a/crates/util/src/process.rs b/crates/util/src/process.rs index 6c3d4e0c41eaeabf4e0d485e4d70dd340ae7afc9..eaf543dbd817ba9b30e42eb17b7115aec39d44c9 100644 --- a/crates/util/src/process.rs +++ b/crates/util/src/process.rs @@ -36,7 +36,12 @@ impl Child { .stdout(stdout) .stderr(stderr) .spawn() - .with_context(|| format!("failed to spawn command {command:?}"))?; + .with_context(|| { + format!( + "failed to spawn command {}", + crate::redact::redact_command(&format!("{command:?}")) + ) + })?; Ok(Self { process }) } @@ -55,7 +60,12 @@ impl Child { .stdout(stdout) .stderr(stderr) .spawn() - .with_context(|| format!("failed to spawn command {command:?}"))?; + .with_context(|| { + format!( + "failed to spawn command {}", + crate::redact::redact_command(&format!("{command:?}")) + ) + })?; Ok(Self { process }) } diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index 27ab18b58ce14cc59d57e563103fc9135f93d060..87872856d916ae39809debaeb6c151705367246b 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -1012,4 +1012,40 @@ mod tests { "uname".to_string() ); } + + #[test] + fn 
test_try_quote_single_quote_paths() { + let path_with_quote = r"C:\Temp\O'Brien\repo"; + let shlex_shells = [ + ShellKind::Posix, + ShellKind::Fish, + ShellKind::Csh, + ShellKind::Tcsh, + ShellKind::Rc, + ShellKind::Xonsh, + ShellKind::Elvish, + ShellKind::Nushell, + ]; + + for shell_kind in shlex_shells { + let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned(); + assert_ne!(quoted, path_with_quote); + assert_eq!( + shlex::split("ed), + Some(vec![path_with_quote.to_string()]) + ); + + if shell_kind == ShellKind::Nushell { + let prefixed = shell_kind.prepend_command_prefix("ed); + assert!(prefixed.starts_with('^')); + } + } + + for shell_kind in [ShellKind::PowerShell, ShellKind::Pwsh] { + let quoted = shell_kind.try_quote(path_with_quote).unwrap().into_owned(); + assert!(quoted.starts_with('\'')); + assert!(quoted.ends_with('\'')); + assert!(quoted.contains("O''Brien")); + } + } } diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index 4fc9fd2d69b608c1215495d84c340f11e5be8179..ba9e77cb81086e810af8d17c7f17f2b77f5392d9 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -141,6 +141,14 @@ async fn capture_windows( std::env::current_exe().context("Failed to determine current zed executable path.")?; let shell_kind = ShellKind::new(shell_path, true); + let directory_string = directory.display().to_string(); + let zed_path_string = zed_path.display().to_string(); + let quote_for_shell = |value: &str| { + shell_kind + .try_quote(value) + .map(|quoted| quoted.into_owned()) + .unwrap_or_else(|| value.to_owned()) + }; let mut cmd = crate::command::new_command(shell_path); cmd.args(args); let cmd = match shell_kind { @@ -149,52 +157,54 @@ async fn capture_windows( | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh - | ShellKind::Posix => cmd.args([ - "-l", - "-i", - "-c", - &format!( - "cd '{}'; '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::PowerShell | 
ShellKind::Pwsh => cmd.args([ - "-NonInteractive", - "-NoProfile", - "-Command", - &format!( - "Set-Location '{}'; & '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::Elvish => cmd.args([ - "-c", - &format!( - "cd '{}'; '{}' --printenv", - directory.display(), - zed_path.display() - ), - ]), - ShellKind::Nushell => cmd.args([ - "-c", - &format!( - "cd '{}'; {}'{}' --printenv", - directory.display(), - shell_kind - .command_prefix() - .map(|prefix| prefix.to_string()) - .unwrap_or_default(), - zed_path.display() - ), - ]), + | ShellKind::Posix => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + cmd.args([ + "-l", + "-i", + "-c", + &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path), + ]) + } + ShellKind::PowerShell | ShellKind::Pwsh => { + let quoted_directory = ShellKind::quote_pwsh(&directory_string); + let quoted_zed_path = ShellKind::quote_pwsh(&zed_path_string); + cmd.args([ + "-NonInteractive", + "-NoProfile", + "-Command", + &format!( + "Set-Location {}; & {} --printenv", + quoted_directory, quoted_zed_path + ), + ]) + } + ShellKind::Elvish => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + cmd.args([ + "-c", + &format!("cd {}; {} --printenv", quoted_directory, quoted_zed_path), + ]) + } + ShellKind::Nushell => { + let quoted_directory = quote_for_shell(&directory_string); + let quoted_zed_path = quote_for_shell(&zed_path_string); + let zed_command = shell_kind + .prepend_command_prefix("ed_zed_path) + .into_owned(); + cmd.args([ + "-c", + &format!("cd {}; {} --printenv", quoted_directory, zed_command), + ]) + } ShellKind::Cmd => cmd.args([ "/c", "cd", - &directory.display().to_string(), + &directory_string, "&&", - &zed_path.display().to_string(), + &zed_path_string, "--printenv", ]), } diff --git a/crates/util/src/test.rs b/crates/util/src/test.rs index 
0a251a1e2541dba1a4269f9b575401c85c308aac..717754e33375a5fee51829e2e1d35d153e493915 100644 --- a/crates/util/src/test.rs +++ b/crates/util/src/test.rs @@ -1,16 +1,14 @@ mod assertions; mod marked_text; -use git2; -use std::{ - ffi::OsStr, - path::{Path, PathBuf}, -}; -use tempfile::TempDir; - pub use assertions::*; pub use marked_text::*; +use git2; +use std::ffi::OsStr; +use std::path::{Path, PathBuf}; +use tempfile::TempDir; + pub struct TempTree { _temp_dir: TempDir, path: PathBuf, @@ -45,6 +43,7 @@ fn write_tree(path: &Path, tree: serde_json::Value) { Value::Object(_) => { fs::create_dir(&path).unwrap(); + #[cfg(not(target_family = "wasm"))] if path.file_name() == Some(OsStr::new(".git")) { git2::Repository::init(path.parent().unwrap()).unwrap(); } diff --git a/crates/util/src/test/git.rs b/crates/util/src/test/git.rs new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 499ef71e1acac6f5f416e7ed313510c995a3df0a..4f129ef6d529aff0991b86882e5e60b6ad837d5c 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -1,8 +1,8 @@ -pub mod arc_cow; pub mod archive; pub mod command; pub mod fs; pub mod markdown; +pub mod path_list; pub mod paths; pub mod process; pub mod redact; @@ -17,40 +17,27 @@ pub mod size; pub mod test; pub mod time; -use anyhow::{Context as _, Result}; -use futures::Future; +use anyhow::Result; use itertools::Either; -use paths::PathExt; use regex::Regex; use std::path::{Path, PathBuf}; -use std::sync::{LazyLock, OnceLock}; +use std::sync::LazyLock; use std::{ borrow::Cow, cmp::{self, Ordering}, - env, - ops::{AddAssign, Range, RangeInclusive}, - panic::Location, - pin::Pin, - task::{Context, Poll}, - time::Instant, + ops::{Range, RangeInclusive}, }; use unicase::UniCase; +pub use gpui_util::*; + pub use take_until::*; #[cfg(any(test, feature = "test-support"))] pub use util_macros::{line_endings, path, uri}; 
-#[macro_export] -macro_rules! debug_panic { - ( $($fmt_arg:tt)* ) => { - if cfg!(debug_assertions) { - panic!( $($fmt_arg)* ); - } else { - let backtrace = std::backtrace::Backtrace::capture(); - log::error!("{}\n{:?}", format_args!($($fmt_arg)*), backtrace); - } - }; -} +pub use self::shell::{ + get_default_system_shell, get_default_system_shell_preferring_bash, get_system_shell, +}; #[inline] pub const fn is_utf8_char_boundary(u8: u8) -> bool { @@ -174,12 +161,6 @@ fn test_truncate_lines_to_byte_limit() { ); } -pub fn post_inc + AddAssign + Copy>(value: &mut T) -> T { - let prev = *value; - *value += T::from(1); - prev -} - /// Extend a sorted vector with a sorted sequence of items, maintaining the vector's sort order and /// enforcing a maximum length. This also de-duplicates items. Sort the items according to the given callback. Before calling this, /// both `vec` and `new_items` should already be sorted according to the `cmp` comparator. @@ -287,7 +268,7 @@ fn load_shell_from_passwd() -> Result<()> { ); let shell = unsafe { std::ffi::CStr::from_ptr(entry.pw_shell).to_str().unwrap() }; - let should_set_shell = env::var("SHELL").map_or(true, |shell_env| { + let should_set_shell = std::env::var("SHELL").map_or(true, |shell_env| { shell_env != shell && !std::path::Path::new(&shell_env).exists() }); @@ -296,7 +277,7 @@ fn load_shell_from_passwd() -> Result<()> { "updating SHELL environment variable to value from passwd entry: {:?}", shell, ); - unsafe { env::set_var("SHELL", shell) }; + unsafe { std::env::set_var("SHELL", shell) }; } Ok(()) @@ -304,6 +285,8 @@ fn load_shell_from_passwd() -> Result<()> { /// Returns a shell escaped path for the current zed executable pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result { + use anyhow::Context as _; + use paths::PathExt; let mut zed_path = std::env::current_exe().context("Failed to determine current zed executable path.")?; if cfg!(target_os = "linux") @@ -326,6 +309,7 @@ pub fn 
get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result Result { + use anyhow::Context as _; let zed_path = std::env::current_exe().context("Failed to determine current zed executable path.")?; let parent = zed_path @@ -365,6 +349,8 @@ pub fn get_zed_cli_path() -> Result { #[cfg(unix)] pub async fn load_login_shell_environment() -> Result<()> { + use anyhow::Context as _; + load_shell_from_passwd().log_err(); // If possible, we want to `cd` in the user's `$HOME` to trigger programs @@ -383,7 +369,7 @@ pub async fn load_login_shell_environment() -> Result<()> { if name == "SHLVL" { continue; } - unsafe { env::set_var(&name, &value) }; + unsafe { std::env::set_var(&name, &value) }; } log::info!( @@ -404,7 +390,7 @@ pub fn set_pre_exec_to_start_new_session( ) -> &mut std::process::Command { // safety: code in pre_exec should be signal safe. // https://man7.org/linux/man-pages/man7/signal-safety.7.html - #[cfg(not(target_os = "windows"))] + #[cfg(unix)] unsafe { use std::os::unix::process::CommandExt; command.pre_exec(|| { @@ -485,25 +471,6 @@ pub fn merge_non_null_json_value_into(source: serde_json::Value, target: &mut se } } -pub fn measure(label: &str, f: impl FnOnce() -> R) -> R { - static ZED_MEASUREMENTS: OnceLock = OnceLock::new(); - let zed_measurements = ZED_MEASUREMENTS.get_or_init(|| { - env::var("ZED_MEASUREMENTS") - .map(|measurements| measurements == "1" || measurements == "true") - .unwrap_or(false) - }); - - if *zed_measurements { - let start = Instant::now(); - let result = f(); - let elapsed = start.elapsed(); - eprintln!("{}: {:?}", label, elapsed); - result - } else { - f() - } -} - pub fn expanded_and_wrapped_usize_range( range: Range, additional_before: usize, @@ -570,222 +537,6 @@ pub fn wrapped_usize_outward_from( }) } -pub trait ResultExt { - type Ok; - - fn log_err(self) -> Option; - /// Assert that this result should never be an error in development or tests. 
- fn debug_assert_ok(self, reason: &str) -> Self; - fn warn_on_err(self) -> Option; - fn log_with_level(self, level: log::Level) -> Option; - fn anyhow(self) -> anyhow::Result - where - E: Into; -} - -impl ResultExt for Result -where - E: std::fmt::Debug, -{ - type Ok = T; - - #[track_caller] - fn log_err(self) -> Option { - self.log_with_level(log::Level::Error) - } - - #[track_caller] - fn debug_assert_ok(self, reason: &str) -> Self { - if let Err(error) = &self { - debug_panic!("{reason} - {error:?}"); - } - self - } - - #[track_caller] - fn warn_on_err(self) -> Option { - self.log_with_level(log::Level::Warn) - } - - #[track_caller] - fn log_with_level(self, level: log::Level) -> Option { - match self { - Ok(value) => Some(value), - Err(error) => { - log_error_with_caller(*Location::caller(), error, level); - None - } - } - } - - fn anyhow(self) -> anyhow::Result - where - E: Into, - { - self.map_err(Into::into) - } -} - -fn log_error_with_caller(caller: core::panic::Location<'_>, error: E, level: log::Level) -where - E: std::fmt::Debug, -{ - #[cfg(not(target_os = "windows"))] - let file = caller.file(); - #[cfg(target_os = "windows")] - let file = caller.file().replace('\\', "/"); - // In this codebase all crates reside in a `crates` directory, - // so discard the prefix up to that segment to find the crate name - let file = file.split_once("crates/"); - let target = file.as_ref().and_then(|(_, s)| s.split_once("/src/")); - - let module_path = target.map(|(krate, module)| { - if module.starts_with(krate) { - module.trim_end_matches(".rs").replace('/', "::") - } else { - krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::") - } - }); - let file = file.map(|(_, file)| format!("crates/{file}")); - log::logger().log( - &log::Record::builder() - .target(module_path.as_deref().unwrap_or("")) - .module_path(file.as_deref()) - .args(format_args!("{:?}", error)) - .file(Some(caller.file())) - .line(Some(caller.line())) - .level(level) - .build(), 
- ); -} - -pub fn log_err(error: &E) { - log_error_with_caller(*Location::caller(), error, log::Level::Error); -} - -pub trait TryFutureExt { - fn log_err(self) -> LogErrorFuture - where - Self: Sized; - - fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture - where - Self: Sized; - - fn warn_on_err(self) -> LogErrorFuture - where - Self: Sized; - fn unwrap(self) -> UnwrapFuture - where - Self: Sized; -} - -impl TryFutureExt for F -where - F: Future>, - E: std::fmt::Debug, -{ - #[track_caller] - fn log_err(self) -> LogErrorFuture - where - Self: Sized, - { - let location = Location::caller(); - LogErrorFuture(self, log::Level::Error, *location) - } - - fn log_tracked_err(self, location: core::panic::Location<'static>) -> LogErrorFuture - where - Self: Sized, - { - LogErrorFuture(self, log::Level::Error, location) - } - - #[track_caller] - fn warn_on_err(self) -> LogErrorFuture - where - Self: Sized, - { - let location = Location::caller(); - LogErrorFuture(self, log::Level::Warn, *location) - } - - fn unwrap(self) -> UnwrapFuture - where - Self: Sized, - { - UnwrapFuture(self) - } -} - -#[must_use] -pub struct LogErrorFuture(F, log::Level, core::panic::Location<'static>); - -impl Future for LogErrorFuture -where - F: Future>, - E: std::fmt::Debug, -{ - type Output = Option; - - fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { - let level = self.1; - let location = self.2; - let inner = unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) }; - match inner.poll(cx) { - Poll::Ready(output) => Poll::Ready(match output { - Ok(output) => Some(output), - Err(error) => { - log_error_with_caller(location, error, level); - None - } - }), - Poll::Pending => Poll::Pending, - } - } -} - -pub struct UnwrapFuture(F); - -impl Future for UnwrapFuture -where - F: Future>, - E: std::fmt::Debug, -{ - type Output = T; - - fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { - let inner = unsafe { Pin::new_unchecked(&mut 
self.get_unchecked_mut().0) }; - match inner.poll(cx) { - Poll::Ready(result) => Poll::Ready(result.unwrap()), - Poll::Pending => Poll::Pending, - } - } -} - -pub struct Deferred(Option); - -impl Deferred { - /// Drop without running the deferred function. - pub fn abort(mut self) { - self.0.take(); - } -} - -impl Drop for Deferred { - fn drop(&mut self) { - if let Some(f) = self.0.take() { - f() - } - } -} - -/// Run the given function when the returned value is dropped (unless it's cancelled). -#[must_use] -pub fn defer(f: F) -> Deferred { - Deferred(Some(f)) -} - #[cfg(any(test, feature = "test-support"))] mod rng { use rand::prelude::*; @@ -849,23 +600,6 @@ pub fn asset_str(path: &str) -> Cow<'static, str> { } } -/// Expands to an immediately-invoked function expression. Good for using the ? operator -/// in functions which do not return an Option or Result. -/// -/// Accepts a normal block, an async block, or an async move block. -#[macro_export] -macro_rules! maybe { - ($block:block) => { - (|| $block)() - }; - (async $block:block) => { - (async || $block)() - }; - (async move $block:block) => { - (async move || $block)() - }; -} - pub trait RangeExt { fn sorted(&self) -> Self; fn to_inclusive(&self) -> RangeInclusive; @@ -1022,10 +756,6 @@ pub fn default() -> D { Default::default() } -pub use self::shell::{ - get_default_system_shell, get_default_system_shell_preferring_bash, get_system_shell, -}; - #[derive(Debug)] pub enum ConnectionResult { Timeout, @@ -1049,15 +779,6 @@ impl From> for ConnectionResult { } } -#[track_caller] -pub fn some_or_debug_panic(option: Option) -> Option { - #[cfg(debug_assertions)] - if option.is_none() { - panic!("Unexpected None"); - } - option -} - /// Normalizes a path by resolving `.` and `..` components without /// requiring the path to exist on disk (unlike `canonicalize`). 
pub fn normalize_path(path: &Path) -> PathBuf { diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 9b6707fdb92520e95e874a5be143024beb21b873..9df8721301a82ed26618f7181ba80c43cbc702df 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -203,20 +203,25 @@ fn find_target( let start_offset = start.to_offset(snapshot); let end_offset = end.to_offset(snapshot); - let mut offset = start_offset; let mut first_char_is_num = snapshot - .chars_at(offset) + .chars_at(start_offset) .next() .map_or(false, |ch| ch.is_ascii_hexdigit()); let mut pre_char = String::new(); - let next_offset = offset + let next_offset = start_offset + snapshot .chars_at(start_offset) .next() .map_or(0, |ch| ch.len_utf8()); - // Backward scan to find the start of the number, but stop at start_offset + // Backward scan to find the start of the number, but stop at start_offset. + // We track `offset` as the start position of the current character. Initialize + // to `next_offset` and decrement at the start of each iteration so that `offset` + // always lands on a valid character boundary (not in the middle of a multibyte char). + let mut offset = next_offset; for ch in snapshot.reversed_chars_at(next_offset) { + offset -= ch.len_utf8(); + // Search boundaries if offset.0 == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { break; @@ -238,7 +243,6 @@ fn find_target( } pre_char.insert(0, ch); - offset -= ch.len_utf8(); } // The backward scan breaks on whitespace, including newlines. Without this @@ -895,4 +899,15 @@ mod test { .await .assert_eq("# Title\n2. item\nˇ2. item\n3. 
item"); } + + #[gpui::test] + async fn test_increment_with_multibyte_characters(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Test cursor after a multibyte character - this would panic before the fix + // because the backward scan would land in the middle of the Korean character + cx.set_state("지ˇ1", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("지ˇ2", Mode::Normal); + } } diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index 8ad4e6b9c02e3c83ba82e6383a7a32fc3b60ec22..6f2abfddd576f51e90b7c8db4f54419a365ce79b 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -228,7 +228,7 @@ impl Vim { editor.highlight_background( HighlightKey::HighlightOnYank, &ranges_to_highlight, - |_, colors| colors.colors().editor_document_highlight_read_background, + |_, colors| colors.colors().vim_yank_background, cx, ); cx.spawn(async move |this, cx| { diff --git a/crates/web_search_providers/Cargo.toml b/crates/web_search_providers/Cargo.toml index ecdca5883ff541459e94170986df3b7f16036c5a..ff264edcb150063237c633de746b2f6b9f6f250c 100644 --- a/crates/web_search_providers/Cargo.toml +++ b/crates/web_search_providers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/web_search_providers.rs" [dependencies] anyhow.workspace = true client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true futures.workspace = true gpui.workspace = true diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 2f3ccdbb52a884471250ad458e8b7922437cb9ae..c8bc89953f2b2d3ec62bac07e80f2737522824f7 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -1,7 +1,8 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; -use client::Client; +use client::{Client, UserStore}; +use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use 
futures::AsyncReadExt as _; use gpui::{App, AppContext, Context, Entity, Subscription, Task}; @@ -14,8 +15,8 @@ pub struct CloudWebSearchProvider { } impl CloudWebSearchProvider { - pub fn new(client: Arc, cx: &mut App) -> Self { - let state = cx.new(|cx| State::new(client, cx)); + pub fn new(client: Arc, user_store: Entity, cx: &mut App) -> Self { + let state = cx.new(|cx| State::new(client, user_store, cx)); Self { state } } @@ -23,24 +24,31 @@ impl CloudWebSearchProvider { pub struct State { client: Arc, + user_store: Entity, llm_api_token: LlmApiToken, _llm_token_subscription: Subscription, } impl State { - pub fn new(client: Arc, cx: &mut Context) -> Self { + pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); Self { client, + user_store, llm_api_token: LlmApiToken::default(), _llm_token_subscription: cx.subscribe( &refresh_llm_token_listener, |this, _, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); cx.spawn(async move |_this, _cx| { - llm_api_token.refresh(&client).await?; + llm_api_token.refresh(&client, organization_id).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -61,21 +69,31 @@ impl WebSearchProvider for CloudWebSearchProvider { let state = self.state.read(cx); let client = state.client.clone(); let llm_api_token = state.llm_api_token.clone(); + let organization_id = state + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); let body = WebSearchBody { query }; - cx.background_spawn(async move { perform_web_search(client, llm_api_token, body).await }) + cx.background_spawn(async move { + perform_web_search(client, llm_api_token, organization_id, body).await + }) } } async fn perform_web_search( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, body: 
WebSearchBody, ) -> Result { const MAX_RETRIES: usize = 3; let http_client = &client.http_client(); let mut retries_remaining = MAX_RETRIES; - let mut token = llm_api_token.acquire(&client).await?; + let mut token = llm_api_token + .acquire(&client, organization_id.clone()) + .await?; loop { if retries_remaining == 0 { @@ -100,7 +118,9 @@ async fn perform_web_search( response.body_mut().read_to_string(&mut body).await?; return Ok(serde_json::from_str(&body)?); } else if response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = llm_api_token + .refresh(&client, organization_id.clone()) + .await?; retries_remaining -= 1; } else { // For now we will only retry if the LLM token is expired, diff --git a/crates/web_search_providers/src/web_search_providers.rs b/crates/web_search_providers/src/web_search_providers.rs index 8ab0aee47a414c4cc669ab05e727a827d17c2844..509632429fb167cd489cd4253ceae0ce479b10a8 100644 --- a/crates/web_search_providers/src/web_search_providers.rs +++ b/crates/web_search_providers/src/web_search_providers.rs @@ -1,26 +1,28 @@ mod cloud; -use client::Client; +use client::{Client, UserStore}; use gpui::{App, Context, Entity}; use language_model::LanguageModelRegistry; use std::sync::Arc; use web_search::{WebSearchProviderId, WebSearchRegistry}; -pub fn init(client: Arc, cx: &mut App) { +pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let registry = WebSearchRegistry::global(cx); registry.update(cx, |registry, cx| { - register_web_search_providers(registry, client, cx); + register_web_search_providers(registry, client, user_store, cx); }); } fn register_web_search_providers( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, cx: &mut Context, ) { register_zed_web_search_provider( registry, client.clone(), + user_store.clone(), &LanguageModelRegistry::global(cx), cx, ); @@ -29,7 +31,13 @@ fn register_web_search_providers( &LanguageModelRegistry::global(cx), move |this, registry, 
event, cx| { if let language_model::Event::DefaultModelChanged = event { - register_zed_web_search_provider(this, client.clone(), ®istry, cx) + register_zed_web_search_provider( + this, + client.clone(), + user_store.clone(), + ®istry, + cx, + ) } }, ) @@ -39,6 +47,7 @@ fn register_web_search_providers( fn register_zed_web_search_provider( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, language_model_registry: &Entity, cx: &mut Context, ) { @@ -47,7 +56,10 @@ fn register_zed_web_search_provider( .default_model() .is_some_and(|default| default.is_provided_by_zed()); if using_zed_provider { - registry.register_provider(cloud::CloudWebSearchProvider::new(client, cx), cx) + registry.register_provider( + cloud::CloudWebSearchProvider::new(client, user_store, cx), + cx, + ) } else { registry.unregister_provider(WebSearchProviderId( cloud::ZED_WEB_SEARCH_PROVIDER_ID.into(), diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index dcd0bf640fdf279fb1874ba77307ccbd3c431393..84fd10c8c03e4f7411fc8c813b70255f5e00031d 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -14,7 +14,6 @@ doctest = false [features] test-support = [ - "call/test-support", "client/test-support", "http_client/test-support", "db/test-support", @@ -72,7 +71,6 @@ zed_actions.workspace = true windows.workspace = true [dev-dependencies] -call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } dap = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index b29e02f05b367bab557403f3bb34f6ffa45caecc..97a52b606ec951ca015b62f301ba9b898af3d254 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -925,10 +925,10 @@ impl ItemHandle for Entity { }, )); - cx.on_blur( + cx.on_focus_out( &self.read(cx).focus_handle(cx), window, - move 
|workspace, window, cx| { + move |workspace, _event, window, cx| { if let Some(item) = weak_item.upgrade() && item.workspace_settings(cx).autosave == AutosaveSetting::OnFocusChange { @@ -1371,7 +1371,8 @@ pub mod test { }; use gpui::{ AnyElement, App, AppContext as _, Context, Entity, EntityId, EventEmitter, Focusable, - InteractiveElement, IntoElement, Render, SharedString, Task, WeakEntity, Window, + InteractiveElement, IntoElement, ParentElement, Render, SharedString, Task, WeakEntity, + Window, }; use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; use std::{any::Any, cell::Cell, sync::Arc}; @@ -1400,6 +1401,7 @@ pub mod test { pub tab_detail: Cell>, serialize: Option Option>>>>, focus_handle: gpui::FocusHandle, + pub child_focus_handles: Vec, } impl project::ProjectItem for TestProjectItem { @@ -1482,6 +1484,7 @@ pub mod test { workspace_id: Default::default(), focus_handle: cx.focus_handle(), serialize: None, + child_focus_handles: Vec::new(), } } @@ -1529,6 +1532,11 @@ pub mod test { self } + pub fn with_child_focus_handles(mut self, count: usize, cx: &mut Context) -> Self { + self.child_focus_handles = (0..count).map(|_| cx.focus_handle()).collect(); + self + } + pub fn set_state(&mut self, state: String, cx: &mut Context) { self.push_to_nav_history(cx); self.state = state; @@ -1543,7 +1551,12 @@ pub mod test { impl Render for TestItem { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { - gpui::div().track_focus(&self.focus_handle(cx)) + let parent = gpui::div().track_focus(&self.focus_handle(cx)); + self.child_focus_handles + .iter() + .fold(parent, |parent, child_handle| { + parent.child(gpui::div().track_focus(child_handle)) + }) } } @@ -1641,23 +1654,30 @@ pub mod test { where Self: Sized, { - Task::ready(Some(cx.new(|cx| Self { - state: self.state.clone(), - label: self.label.clone(), - save_count: self.save_count, - save_as_count: self.save_as_count, - reload_count: self.reload_count, - is_dirty: 
self.is_dirty, - buffer_kind: self.buffer_kind, - has_conflict: self.has_conflict, - has_deleted_file: self.has_deleted_file, - project_items: self.project_items.clone(), - nav_history: None, - tab_descriptions: None, - tab_detail: Default::default(), - workspace_id: self.workspace_id, - focus_handle: cx.focus_handle(), - serialize: None, + Task::ready(Some(cx.new(|cx| { + Self { + state: self.state.clone(), + label: self.label.clone(), + save_count: self.save_count, + save_as_count: self.save_as_count, + reload_count: self.reload_count, + is_dirty: self.is_dirty, + buffer_kind: self.buffer_kind, + has_conflict: self.has_conflict, + has_deleted_file: self.has_deleted_file, + project_items: self.project_items.clone(), + nav_history: None, + tab_descriptions: None, + tab_detail: Default::default(), + workspace_id: self.workspace_id, + focus_handle: cx.focus_handle(), + serialize: None, + child_focus_handles: self + .child_focus_handles + .iter() + .map(|_| cx.focus_handle()) + .collect(), + } }))) } diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 478e33418c68efa1cf5da0868fe16d6dec693447..cd77f4fe30461b5f726c3bcd2f5f78b561e4d415 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -5,7 +5,8 @@ use gpui::{ ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId, actions, deferred, px, }; -use project::Project; +use project::{DisableAiSettings, Project}; +use settings::Settings; use std::future::Future; use std::path::PathBuf; use ui::prelude::*; @@ -14,8 +15,8 @@ use util::ResultExt; const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); use crate::{ - DockPosition, Item, ModalView, Panel, Toast, Workspace, WorkspaceId, client_side_decorations, - notifications::NotificationId, + CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, Panel, Toast, + Workspace, WorkspaceId, client_side_decorations, 
notifications::NotificationId, }; actions!( @@ -122,6 +123,13 @@ impl MultiWorkspace { } }); let quit_subscription = cx.on_app_quit(Self::app_will_quit); + let settings_subscription = + cx.observe_global_in::(window, |this, window, cx| { + if DisableAiSettings::get_global(cx).disable_ai && this.sidebar_open { + this.close_sidebar(window, cx); + } + }); + Self::subscribe_to_workspace(&workspace, cx); Self { window_id: window.window_handle().window_id(), workspaces: vec![workspace], @@ -132,7 +140,11 @@ impl MultiWorkspace { pending_removal_tasks: Vec::new(), _serialize_task: None, _create_task: None, - _subscriptions: vec![release_subscription, quit_subscription], + _subscriptions: vec![ + release_subscription, + quit_subscription, + settings_subscription, + ], } } @@ -168,7 +180,7 @@ impl MultiWorkspace { } pub fn multi_workspace_enabled(&self, cx: &App) -> bool { - cx.has_flag::() + cx.has_flag::() && !DisableAiSettings::get_global(cx).disable_ai } pub fn toggle_sidebar(&mut self, window: &mut Window, cx: &mut Context) { @@ -237,6 +249,41 @@ impl MultiWorkspace { cx.notify(); } + pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context) { + cx.spawn_in(window, async move |this, cx| { + let workspaces = this.update(cx, |multi_workspace, _cx| { + multi_workspace.workspaces().to_vec() + })?; + + for workspace in workspaces { + let should_continue = workspace + .update_in(cx, |workspace, window, cx| { + workspace.prepare_to_close(CloseIntent::CloseWindow, window, cx) + })? 
+ .await?; + if !should_continue { + return anyhow::Ok(()); + } + } + + cx.update(|window, _cx| { + window.remove_window(); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn subscribe_to_workspace(workspace: &Entity, cx: &mut Context) { + cx.subscribe(workspace, |this, workspace, event, cx| { + if let WorkspaceEvent::Activate = event { + this.activate(workspace, cx); + } + }) + .detach(); + } + pub fn is_sidebar_open(&self) -> bool { self.sidebar_open } @@ -290,6 +337,7 @@ impl MultiWorkspace { workspace.set_workspace_sidebar_open(true, cx); }); } + Self::subscribe_to_workspace(&workspace, cx); self.workspaces.push(workspace); cx.notify(); self.workspaces.len() - 1 @@ -412,6 +460,7 @@ impl MultiWorkspace { .update(cx, |workspace, cx| workspace.focus_panel::(window, cx)) } + // used in a test pub fn toggle_modal( &mut self, window: &mut Window, @@ -673,10 +722,20 @@ impl Render for MultiWorkspace { None }; + let ui_font = theme::setup_ui_font(window, cx); + let text_color = cx.theme().colors().text; + + let workspace = self.workspace().clone(); + let workspace_key_context = workspace.update(cx, |workspace, cx| workspace.key_context(cx)); + let root = workspace.update(cx, |workspace, cx| workspace.actions(h_flex(), window, cx)); + client_side_decorations( - h_flex() - .key_context("Workspace") + root.key_context(workspace_key_context) + .relative() .size_full() + .font(ui_font) + .text_color(text_color) + .on_action(cx.listener(Self::close_window)) .on_action( cx.listener(|this: &mut Self, _: &NewWorkspaceInWindow, window, cx| { this.create_workspace(window, cx); @@ -692,16 +751,18 @@ impl Render for MultiWorkspace { this.activate_previous_workspace(window, cx); }, )) - .on_action(cx.listener( - |this: &mut Self, _: &ToggleWorkspaceSidebar, window, cx| { - this.toggle_sidebar(window, cx); - }, - )) - .on_action( - cx.listener(|this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| { - this.focus_sidebar(window, cx); - }), - ) + 
.when(self.multi_workspace_enabled(cx), |this| { + this.on_action(cx.listener( + |this: &mut Self, _: &ToggleWorkspaceSidebar, window, cx| { + this.toggle_sidebar(window, cx); + }, + )) + .on_action(cx.listener( + |this: &mut Self, _: &FocusWorkspaceSidebar, window, cx| { + this.focus_sidebar(window, cx); + }, + )) + }) .when( self.sidebar_open() && self.multi_workspace_enabled(cx), |this| { @@ -723,7 +784,8 @@ impl Render for MultiWorkspace { .size_full() .overflow_hidden() .child(self.workspace().clone()), - ), + ) + .child(self.workspace().read(cx).modal_layer.clone()), window, cx, Tiling { @@ -733,3 +795,92 @@ impl Render for MultiWorkspace { ) } } + +#[cfg(test)] +mod tests { + use super::*; + use fs::FakeFs; + use gpui::TestAppContext; + use settings::SettingsStore; + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + DisableAiSettings::register(cx); + cx.update_flags(false, vec!["agent-v2".into()]); + }); + } + + #[gpui::test] + async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + multi_workspace.read_with(cx, |mw, cx| { + assert!(mw.multi_workspace_enabled(cx)); + }); + + multi_workspace.update_in(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + assert!(mw.is_sidebar_open()); + }); + + cx.update(|_window, cx| { + DisableAiSettings::override_global(DisableAiSettings { disable_ai: true }, cx); + }); + cx.run_until_parked(); + + multi_workspace.read_with(cx, |mw, cx| { + assert!( + !mw.is_sidebar_open(), + "Sidebar should be closed when disable_ai is true" + ); + assert!( + !mw.multi_workspace_enabled(cx), + "Multi-workspace should be disabled when disable_ai is true" + 
); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + multi_workspace.read_with(cx, |mw, _cx| { + assert!( + !mw.is_sidebar_open(), + "Sidebar should remain closed when toggled with disable_ai true" + ); + }); + + cx.update(|_window, cx| { + DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx); + }); + cx.run_until_parked(); + + multi_workspace.read_with(cx, |mw, cx| { + assert!( + mw.multi_workspace_enabled(cx), + "Multi-workspace should be enabled after re-enabling AI" + ); + assert!( + !mw.is_sidebar_open(), + "Sidebar should still be closed after re-enabling AI (not auto-opened)" + ); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + multi_workspace.read_with(cx, |mw, _cx| { + assert!( + mw.is_sidebar_open(), + "Sidebar should open when toggled after re-enabling AI" + ); + }); + } +} diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 66e5eeb4734557c818f42b6537859634435fd295..81283427e83afb820b113250545d90f787030e25 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1468,7 +1468,8 @@ impl Pane { fn update_active_tab(&mut self, index: usize) { if !self.is_tab_pinned(index) { self.suppress_scroll = false; - self.tab_bar_scroll_handle.scroll_to_item(index); + self.tab_bar_scroll_handle + .scroll_to_item(index - self.pinned_tab_count); } } @@ -3449,7 +3450,7 @@ impl Pane { cx, ) .children(pinned_tabs.len().ne(&0).then(|| { - let max_scroll = self.tab_bar_scroll_handle.max_offset().width; + let max_scroll = self.tab_bar_scroll_handle.max_offset().x; // We need to check both because offset returns delta values even when the scroll handle is not scrollable let is_scrolled = self.tab_bar_scroll_handle.offset().x < px(0.); // Avoid flickering when max_offset is very small (< 2px). 
@@ -7935,6 +7936,71 @@ mod tests { ); } + #[gpui::test] + async fn test_pinned_tabs_scroll_to_item_uses_correct_index(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + cx.simulate_resize(size(px(400.), px(300.))); + + for label in ["A", "B", "C"] { + add_labeled_item(&pane, label, false, cx); + } + + pane.update_in(cx, |pane, window, cx| { + pane.pin_tab_at(0, window, cx); + pane.pin_tab_at(1, window, cx); + pane.pin_tab_at(2, window, cx); + }); + + for label in ["D", "E", "F", "G", "H", "I", "J", "K"] { + add_labeled_item(&pane, label, false, cx); + } + + assert_item_labels( + &pane, + ["A!", "B!", "C!", "D", "E", "F", "G", "H", "I", "J", "K*"], + cx, + ); + + cx.run_until_parked(); + + // Verify overflow exists (precondition for scroll test) + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + assert!( + scroll_handle.max_offset().x > px(0.), + "Test requires tab overflow to verify scrolling. Increase tab count or reduce window width." 
+ ); + + // Activate a different tab first, then activate K + // This ensures we're not just re-activating an already-active tab + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(3, true, true, window, cx); + }); + cx.run_until_parked(); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(10, true, true, window, cx); + }); + cx.run_until_parked(); + + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + let k_tab_bounds = cx.debug_bounds("TAB-10").unwrap(); + let scroll_bounds = scroll_handle.bounds(); + + assert!( + k_tab_bounds.left() >= scroll_bounds.left(), + "Active tab K should be scrolled into view" + ); + } + #[gpui::test] async fn test_close_all_items_including_pinned(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 1d28b05514baa53244926bfad906e667b0b287cd..0921a19486718c5375ed17ebbb3d7e314546f8d7 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -61,22 +61,33 @@ impl PaneGroup { new_pane: &Entity, direction: SplitDirection, cx: &mut App, - ) -> Result<()> { - let result = match &mut self.root { + ) { + let found = match &mut self.root { Member::Pane(pane) => { if pane == old_pane { self.root = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); - Ok(()) + true } else { - anyhow::bail!("Pane not found"); + false } } Member::Axis(axis) => axis.split(old_pane, new_pane, direction), }; - if result.is_ok() { - self.mark_positions(cx); + + // If the pane wasn't found, fall back to splitting the first pane in the tree. 
+ if !found { + let first_pane = self.root.first_pane(); + match &mut self.root { + Member::Pane(_) => { + self.root = Member::new_axis(first_pane, new_pane.clone(), direction); + } + Member::Axis(axis) => { + let _ = axis.split(&first_pane, new_pane, direction); + } + } } - result + + self.mark_positions(cx); } pub fn bounding_box_for_pane(&self, pane: &Entity) -> Option> { @@ -612,12 +623,12 @@ impl PaneAxis { old_pane: &Entity, new_pane: &Entity, direction: SplitDirection, - ) -> Result<()> { + ) -> bool { for (mut idx, member) in self.members.iter_mut().enumerate() { match member { Member::Axis(axis) => { - if axis.split(old_pane, new_pane, direction).is_ok() { - return Ok(()); + if axis.split(old_pane, new_pane, direction) { + return true; } } Member::Pane(pane) => { @@ -631,12 +642,12 @@ impl PaneAxis { *member = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); } - return Ok(()); + return true; } } } } - anyhow::bail!("Pane not found"); + false } fn insert_pane(&mut self, idx: usize, new_pane: &Entity) { diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index ba6ee548348cf1dc3da612e112d497c1da011559..492b7a8f385730feaa06dfe3b5e8b4cc0a20bb59 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -4359,4 +4359,114 @@ mod tests { "Pending removal task should have deleted the workspace row when awaited" ); } + + #[gpui::test] + async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) { + use crate::multi_workspace::MultiWorkspace; + use feature_flags::FeatureFlagAppExt; + use project::Project; + + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), 
window, cx)); + + multi_workspace.update_in(cx, |mw, _, cx| { + mw.set_random_database_id(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + + cx.run_until_parked(); + + let new_workspace_db_id = + multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); + assert!( + new_workspace_db_id.is_some(), + "After run_until_parked, the workspace should have a database_id" + ); + + let workspace_id = new_workspace_db_id.unwrap(); + + assert!( + DB.workspace_for_id(workspace_id).is_some(), + "The workspace row should exist in the DB" + ); + + cx.simulate_resize(gpui::size(px(1024.0), px(768.0))); + + // Advance the clock past the 100ms debounce timer so the bounds + // observer task fires + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + let serialized = DB + .workspace_for_id(workspace_id) + .expect("workspace row should still exist"); + assert!( + serialized.window_bounds.is_some(), + "The bounds observer should write bounds for the workspace's real DB ID, \ + even when the workspace was created via create_workspace (where the ID \ + is assigned asynchronously after construction)." 
+ ); + } + + #[gpui::test] + async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) { + use crate::multi_workspace::MultiWorkspace; + use feature_flags::FeatureFlagAppExt; + use project::Project; + + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + let dir = tempfile::TempDir::with_prefix("flush_bounds_test").unwrap(); + fs.insert_tree(dir.path(), json!({})).await; + + let project = Project::test(fs.clone(), [dir.path()], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace_id = DB.next_id().await.unwrap(); + multi_workspace.update_in(cx, |mw, _, cx| { + mw.workspace().update(cx, |ws, _cx| { + ws.set_database_id(workspace_id); + }); + }); + + let task = multi_workspace.update_in(cx, |mw, window, cx| { + mw.workspace() + .update(cx, |ws, cx| ws.flush_serialization(window, cx)) + }); + task.await; + + let after = DB + .workspace_for_id(workspace_id) + .expect("workspace row should exist after flush_serialization"); + assert!( + !after.paths.is_empty(), + "flush_serialization should have written paths via save_workspace" + ); + assert!( + after.window_bounds.is_some(), + "flush_serialization should ensure window bounds are persisted to the DB \ + before the process exits." 
+ ); + } } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index cdb646ec3b8248bdd0b5784424ed7b8df8ac0ee8..0971ebd0ddc9265ccf9ea10da7745ba59914db30 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -93,9 +93,9 @@ pub(crate) struct SerializedWorkspace { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockStructure { - pub(crate) left: DockData, - pub(crate) right: DockData, - pub(crate) bottom: DockData, + pub left: DockData, + pub right: DockData, + pub bottom: DockData, } impl RemoteConnectionKind { @@ -143,9 +143,9 @@ impl Bind for DockStructure { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockData { - pub(crate) visible: bool, - pub(crate) active_panel: Option, - pub(crate) zoom: bool, + pub visible: bool, + pub active_panel: Option, + pub zoom: bool, } impl Column for DockData { diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index 1caa5b56e5f38db00ad59a4aca3a2a830ee023b7..1a16b731b44db9e1678bba9c316e388139d39058 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -151,7 +151,7 @@ const CONTENT: (Section<4>, Section<3>) = ( SectionEntry { icon: IconName::FolderOpen, title: "Open Project", - action: &Open, + action: &Open::DEFAULT, }, SectionEntry { icon: IconName::CloudDownload, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c1d26476544ecf5db51a9c7b358ad12c84aa168f..aba2fc9d98ed6e2178a925029ae7e040004cb102 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7,11 +7,14 @@ mod multi_workspace; pub mod notifications; pub mod pane; pub mod pane_group; -mod path_list; +pub mod path_list { + pub use util::path_list::{PathList, SerializedPathList}; +} mod persistence; pub mod searchable; mod security_modal; pub mod shared_screen; +use 
db::smol::future::yield_now; pub use shared_screen::SharedScreen; mod status_bar; pub mod tasks; @@ -28,7 +31,7 @@ pub use multi_workspace::{ NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, SidebarHandle, ToggleWorkspaceSidebar, }; -pub use path_list::PathList; +pub use path_list::{PathList, SerializedPathList}; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; use anyhow::{Context as _, Result, anyhow}; @@ -76,7 +79,10 @@ pub use pane_group::{ use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items, - model::{ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, SessionWorkspace}, + model::{ + DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, + SessionWorkspace, + }, read_serialized_multi_workspaces, }; use postage::stream::Stream; @@ -146,7 +152,7 @@ use crate::{item::ItemBufferKind, notifications::NotificationId}; use crate::{ persistence::{ SerializedAxis, - model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup}, + model::{DockData, SerializedItem, SerializedPane, SerializedPaneGroup}, }, security_modal::SecurityModal, }; @@ -203,6 +209,34 @@ pub trait DebuggerProvider { fn active_thread_state(&self, cx: &App) -> Option; } +/// Opens a file or directory. +#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] +#[action(namespace = workspace)] +pub struct Open { + /// When true, opens in a new window. When false, adds to the current + /// window as a new workspace (multi-workspace). + #[serde(default = "Open::default_create_new_window")] + pub create_new_window: bool, +} + +impl Open { + pub const DEFAULT: Self = Self { + create_new_window: true, + }; + + /// Used by `#[serde(default)]` on the `create_new_window` field so that + /// the serde default and `Open::DEFAULT` stay in sync. 
+ fn default_create_new_window() -> bool { + Self::DEFAULT.create_new_window + } +} + +impl Default for Open { + fn default() -> Self { + Self::DEFAULT + } +} + actions!( workspace, [ @@ -248,8 +282,6 @@ actions!( NewSearch, /// Opens a new window. NewWindow, - /// Opens a file or directory. - Open, /// Opens multiple files. OpenFiles, /// Opens the current location in terminal. @@ -620,19 +652,19 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); }); }) .ok(); } else { - let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, cx); + let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, true, cx); cx.spawn(async move |cx| { let (window, _) = task.await?; window.update(cx, |multi_workspace, window, cx| { window.activate_window(); let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, window, cx); + prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); }); })?; anyhow::Ok(()) @@ -645,6 +677,7 @@ pub fn prompt_for_open_path_and_open( workspace: &mut Workspace, app_state: Arc, options: PathPromptOptions, + create_new_window: bool, window: &mut Window, cx: &mut Context, ) { @@ -654,10 +687,24 @@ pub fn prompt_for_open_path_and_open( window, cx, ); + let multi_workspace_handle = window.window_handle().downcast::(); cx.spawn_in(window, async move |this, cx| { let Some(paths) = paths.await.log_err().flatten() else { return; }; + if !create_new_window { + if let Some(handle) = multi_workspace_handle { + if let Some(task) = handle + .update(cx, |multi_workspace, window, cx| { + 
multi_workspace.open_project(paths, window, cx) + }) + .log_err() + { + task.await.log_err(); + } + return; + } + } if let Some(task) = this .update_in(cx, |this, window, cx| { this.open_workspace_for_paths(false, paths, window, cx) @@ -1182,6 +1229,7 @@ pub enum Event { }, ZoomChanged, ModalOpened, + Activate, } #[derive(Debug, Clone)] @@ -1250,7 +1298,7 @@ pub struct Workspace { last_active_center_pane: Option>, last_active_view_id: Option, status_bar: Entity, - modal_layer: Entity, + pub(crate) modal_layer: Entity, toast_layer: Entity, titlebar_item: Option, notifications: Notifications, @@ -1286,6 +1334,7 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + _panels_task: Option>>, } impl EventEmitter for Workspace {} @@ -1601,36 +1650,7 @@ impl Workspace { .timer(Duration::from_millis(100)) .await; this.update_in(cx, |this, window, cx| { - if let Some(display) = window.display(cx) - && let Ok(display_uuid) = display.uuid() - { - let window_bounds = window.inner_window_bounds(); - let has_paths = !this.root_paths(cx).is_empty(); - if !has_paths { - cx.background_executor() - .spawn(persistence::write_default_window_bounds( - window_bounds, - display_uuid, - )) - .detach_and_log_err(cx); - } - if let Some(database_id) = workspace_id { - cx.background_executor() - .spawn(DB.set_window_open_status( - database_id, - SerializedWindowBounds(window_bounds), - display_uuid, - )) - .detach_and_log_err(cx); - } else { - cx.background_executor() - .spawn(persistence::write_default_window_bounds( - window_bounds, - display_uuid, - )) - .detach_and_log_err(cx); - } - } + this.save_window_bounds(window, cx).detach(); this.bounds_save_task_queued.take(); }) .ok(); @@ -1685,6 +1705,7 @@ impl Workspace { left_dock, bottom_dock, right_dock, + _panels_task: None, project: project.clone(), follower_states: Default::default(), last_leaders_by_pane: Default::default(), @@ -1728,6 +1749,7 @@ impl Workspace { requesting_window: Option>, env: 
Option>, init: Option) + Send>>, + activate: bool, cx: &mut App, ) -> Task< anyhow::Result<( @@ -1855,7 +1877,11 @@ impl Workspace { workspace }); - multi_workspace.activate(workspace.clone(), cx); + if activate { + multi_workspace.activate(workspace.clone(), cx); + } else { + multi_workspace.add_workspace(workspace.clone(), cx); + } workspace })?; (window, workspace) @@ -2009,6 +2035,76 @@ impl Workspace { [&self.left_dock, &self.bottom_dock, &self.right_dock] } + pub fn capture_dock_state(&self, _window: &Window, cx: &App) -> DockStructure { + let left_dock = self.left_dock.read(cx); + let left_visible = left_dock.is_open(); + let left_active_panel = left_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + // `zoomed_position` is kept in sync with individual panel zoom state + // by the dock code in `Dock::new` and `Dock::add_panel`. + let left_dock_zoom = self.zoomed_position == Some(DockPosition::Left); + + let right_dock = self.right_dock.read(cx); + let right_visible = right_dock.is_open(); + let right_active_panel = right_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let right_dock_zoom = self.zoomed_position == Some(DockPosition::Right); + + let bottom_dock = self.bottom_dock.read(cx); + let bottom_visible = bottom_dock.is_open(); + let bottom_active_panel = bottom_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let bottom_dock_zoom = self.zoomed_position == Some(DockPosition::Bottom); + + DockStructure { + left: DockData { + visible: left_visible, + active_panel: left_active_panel, + zoom: left_dock_zoom, + }, + right: DockData { + visible: right_visible, + active_panel: right_active_panel, + zoom: right_dock_zoom, + }, + bottom: DockData { + visible: bottom_visible, + active_panel: bottom_active_panel, + zoom: bottom_dock_zoom, + }, + } + } + + pub fn set_dock_structure( + &self, + docks: DockStructure, + window: &mut Window, + cx: &mut Context, + ) { + for (dock, 
data) in [ + (&self.left_dock, docks.left), + (&self.bottom_dock, docks.bottom), + (&self.right_dock, docks.right), + ] { + dock.update(cx, |dock, cx| { + dock.serialized_dock = Some(data); + dock.restore_state(window, cx); + }); + } + } + + pub fn open_item_abs_paths(&self, cx: &App) -> Vec { + self.items(cx) + .filter_map(|item| { + let project_path = item.project_path(cx)?; + self.project.read(cx).absolute_path(&project_path, cx) + }) + .collect() + } + pub fn dock_at_position(&self, position: DockPosition) -> &Entity { match position { DockPosition::Left => &self.left_dock, @@ -2068,6 +2164,14 @@ impl Workspace { &self.app_state } + pub fn set_panels_task(&mut self, task: Task>) { + self._panels_task = Some(task); + } + + pub fn take_panels_task(&mut self) -> Option>> { + self._panels_task.take() + } + pub fn user_store(&self) -> &Entity { &self.app_state.user_store } @@ -2573,7 +2677,15 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + env, + None, + true, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { let (multi_workspace_window, _) = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { @@ -2603,7 +2715,15 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + env, + None, + true, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { let (multi_workspace_window, _) = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { @@ -2658,17 +2778,6 @@ impl Workspace { }); } - pub fn close_window(&mut self, _: &CloseWindow, window: &mut 
Window, cx: &mut Context) { - let prepare = self.prepare_to_close(CloseIntent::CloseWindow, window, cx); - cx.spawn_in(window, async move |_, cx| { - if prepare.await? { - cx.update(|window, _cx| window.remove_window())?; - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx) - } - pub fn move_focused_panel_to_next_position( &mut self, _: &MoveFocusedPanelToNextPosition, @@ -2746,6 +2855,7 @@ impl Workspace { .unwrap_or(false) { if close_intent == CloseIntent::CloseWindow { + this.update(cx, |_, cx| cx.emit(Event::Activate))?; let answer = cx.update(|window, cx| { window.prompt( PromptLevel::Warning, @@ -2856,13 +2966,15 @@ impl Workspace { .spawn(cx, async move |cx| { // limit to 100 keystrokes to avoid infinite recursion. for _ in 0..100 { - let mut state = keystrokes.borrow_mut(); - let Some(keystroke) = state.queue.pop_front() else { - state.dispatched.clear(); - state.task.take(); - return; + let keystroke = { + let mut state = keystrokes.borrow_mut(); + let Some(keystroke) = state.queue.pop_front() else { + state.dispatched.clear(); + state.task.take(); + return; + }; + keystroke }; - drop(state); cx.update(|window, cx| { let focused = window.focused(cx); window.dispatch_keystroke(keystroke.clone(), cx); @@ -2877,6 +2989,10 @@ impl Workspace { } }) .ok(); + + // Yield between synthetic keystrokes so deferred focus and + // other effects can settle before dispatching the next key. 
+ yield_now().await; } *keystrokes.borrow_mut() = Default::default(); @@ -2934,6 +3050,10 @@ impl Workspace { futures::future::try_join_all(serialize_tasks).await?; + if !remaining_dirty_items.is_empty() { + workspace.update(cx, |_, cx| cx.emit(Event::Activate))?; + } + if remaining_dirty_items.len() > 1 { let answer = workspace.update_in(cx, |_, window, cx| { let detail = Pane::file_names_for_prompt( @@ -4275,14 +4395,7 @@ impl Workspace { .find_pane_in_direction(direction, cx) .unwrap_or_else(|| self.active_pane.clone()); let new_pane = self.add_pane(window, cx); - if self - .center - .split(&split_off_pane, &new_pane, direction, cx) - .log_err() - .is_none() - { - return; - }; + self.center.split(&split_off_pane, &new_pane, direction, cx); new_pane } }; @@ -4465,14 +4578,8 @@ impl Workspace { return; } let new_pane = self.add_pane(window, cx); - if self - .center - .split(&self.active_pane, &new_pane, action.direction, cx) - .log_err() - .is_none() - { - return; - }; + self.center + .split(&self.active_pane, &new_pane, action.direction, cx); new_pane } }; @@ -4770,8 +4877,7 @@ impl Workspace { ) -> Entity { let new_pane = self.add_pane(window, cx); self.center - .split(&pane_to_split, &new_pane, split_direction, cx) - .unwrap(); + .split(&pane_to_split, &new_pane, split_direction, cx); cx.notify(); new_pane } @@ -4790,7 +4896,7 @@ impl Workspace { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx) }); - self.center.split(&pane, &new_pane, direction, cx).unwrap(); + self.center.split(&pane, &new_pane, direction, cx); cx.notify(); } @@ -4817,7 +4923,7 @@ impl Workspace { pane.set_nav_history(nav_history, cx); pane.add_item(clone, true, true, None, window, cx) }); - this.center.split(&pane, &new_pane, direction, cx).unwrap(); + this.center.split(&pane, &new_pane, direction, cx); cx.notify(); new_pane }) @@ -5871,6 +5977,40 @@ impl Workspace { self.session_id.clone() } + fn save_window_bounds(&self, window: &mut Window, cx: &mut App) 
-> Task<()> { + let Some(display) = window.display(cx) else { + return Task::ready(()); + }; + let Ok(display_uuid) = display.uuid() else { + return Task::ready(()); + }; + + let window_bounds = window.inner_window_bounds(); + let database_id = self.database_id; + let has_paths = !self.root_paths(cx).is_empty(); + + cx.background_executor().spawn(async move { + if !has_paths { + persistence::write_default_window_bounds(window_bounds, display_uuid) + .await + .log_err(); + } + if let Some(database_id) = database_id { + DB.set_window_open_status( + database_id, + SerializedWindowBounds(window_bounds), + display_uuid, + ) + .await + .log_err(); + } else { + persistence::write_default_window_bounds(window_bounds, display_uuid) + .await + .log_err(); + } + }) + } + /// Bypass the 200ms serialization throttle and write workspace state to /// the DB immediately. Returns a task the caller can await to ensure the /// write completes. Used by the quit handler so the most recent state @@ -5878,7 +6018,14 @@ impl Workspace { pub fn flush_serialization(&mut self, window: &mut Window, cx: &mut App) -> Task<()> { self._schedule_serialize_workspace.take(); self._serialize_workspace_task.take(); - self.serialize_workspace_internal(window, cx) + self.bounds_save_task_queued.take(); + + let bounds_task = self.save_window_bounds(window, cx); + let serialize_task = self.serialize_workspace_internal(window, cx); + cx.spawn(async move |_| { + bounds_task.await; + serialize_task.await; + }) } pub fn root_paths(&self, cx: &App) -> Vec> { @@ -6010,53 +6157,7 @@ impl Workspace { window: &mut Window, cx: &mut App, ) -> DockStructure { - let left_dock = this.left_dock.read(cx); - let left_visible = left_dock.is_open(); - let left_active_panel = left_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let left_dock_zoom = left_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let right_dock = this.right_dock.read(cx); - let 
right_visible = right_dock.is_open(); - let right_active_panel = right_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let right_dock_zoom = right_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let bottom_dock = this.bottom_dock.read(cx); - let bottom_visible = bottom_dock.is_open(); - let bottom_active_panel = bottom_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let bottom_dock_zoom = bottom_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - DockStructure { - left: DockData { - visible: left_visible, - active_panel: left_active_panel, - zoom: left_dock_zoom, - }, - right: DockData { - visible: right_visible, - active_panel: right_active_panel, - zoom: right_dock_zoom, - }, - bottom: DockData { - visible: bottom_visible, - active_panel: bottom_active_panel, - zoom: bottom_dock_zoom, - }, - } + this.capture_dock_state(window, cx) } match self.workspace_location(cx) { @@ -6340,7 +6441,47 @@ impl Workspace { }) } - fn actions(&self, div: Div, window: &mut Window, cx: &mut Context) -> Div { + pub fn key_context(&self, cx: &App) -> KeyContext { + let mut context = KeyContext::new_with_defaults(); + context.add("Workspace"); + context.set("keyboard_layout", cx.keyboard_layout().name().to_string()); + if let Some(status) = self + .debugger_provider + .as_ref() + .and_then(|provider| provider.active_thread_state(cx)) + { + match status { + ThreadStatus::Running | ThreadStatus::Stepping => { + context.add("debugger_running"); + } + ThreadStatus::Stopped => context.add("debugger_stopped"), + ThreadStatus::Exited | ThreadStatus::Ended => {} + } + } + + if self.left_dock.read(cx).is_open() { + if let Some(active_panel) = self.left_dock.read(cx).active_panel() { + context.set("left_dock", active_panel.panel_key()); + } + } + + if self.right_dock.read(cx).is_open() { + if let Some(active_panel) = 
self.right_dock.read(cx).active_panel() { + context.set("right_dock", active_panel.panel_key()); + } + } + + if self.bottom_dock.read(cx).is_open() { + if let Some(active_panel) = self.bottom_dock.read(cx).active_panel() { + context.set("bottom_dock", active_panel.panel_key()); + } + } + + context + } + + /// Multiworkspace uses this to add workspace action handling to itself + pub fn actions(&self, div: Div, window: &mut Window, cx: &mut Context) -> Div { self.add_workspace_actions_listeners(div, window, cx) .on_action(cx.listener( |_workspace, action_sequence: &settings::ActionSequence, window, cx| { @@ -6356,7 +6497,6 @@ impl Workspace { .on_action(cx.listener(Self::send_keystrokes)) .on_action(cx.listener(Self::add_folder_to_project)) .on_action(cx.listener(Self::follow_next_collaborator)) - .on_action(cx.listener(Self::close_window)) .on_action(cx.listener(Self::activate_pane_at_index)) .on_action(cx.listener(Self::move_item_to_pane_at_index)) .on_action(cx.listener(Self::move_focused_panel_to_next_position)) @@ -7398,40 +7538,6 @@ impl Render for Workspace { if FIRST_PAINT.swap(false, std::sync::atomic::Ordering::Relaxed) { log::info!("Rendered first frame"); } - let mut context = KeyContext::new_with_defaults(); - context.add("Workspace"); - context.set("keyboard_layout", cx.keyboard_layout().name().to_string()); - if let Some(status) = self - .debugger_provider - .as_ref() - .and_then(|provider| provider.active_thread_state(cx)) - { - match status { - ThreadStatus::Running | ThreadStatus::Stepping => { - context.add("debugger_running"); - } - ThreadStatus::Stopped => context.add("debugger_stopped"), - ThreadStatus::Exited | ThreadStatus::Ended => {} - } - } - - if self.left_dock.read(cx).is_open() { - if let Some(active_panel) = self.left_dock.read(cx).active_panel() { - context.set("left_dock", active_panel.panel_key()); - } - } - - if self.right_dock.read(cx).is_open() { - if let Some(active_panel) = self.right_dock.read(cx).active_panel() { - 
context.set("right_dock", active_panel.panel_key()); - } - } - - if self.bottom_dock.read(cx).is_open() { - if let Some(active_panel) = self.bottom_dock.read(cx).active_panel() { - context.set("bottom_dock", active_panel.panel_key()); - } - } let centered_layout = self.centered_layout && self.center.panes().len() == 1 @@ -7469,8 +7575,7 @@ impl Render for Workspace { .collect::>(); let bottom_dock_layout = WorkspaceSettings::get_global(cx).bottom_dock_layout; - self.actions(div(), window, cx) - .key_context(context) + div() .relative() .size_full() .flex() @@ -7870,7 +7975,6 @@ impl Render for Workspace { .when(self.status_bar_visible(cx), |parent| { parent.child(self.status_bar.clone()) }) - .child(self.modal_layer.clone()) .child(self.toast_layer.clone()), ) } @@ -8082,6 +8186,7 @@ pub async fn restore_multiworkspace( None, None, None, + true, cx, ) }) @@ -8111,6 +8216,7 @@ pub async fn restore_multiworkspace( Some(window_handle), None, None, + true, cx, ) }) @@ -8380,6 +8486,7 @@ pub fn join_channel( requesting_window, None, None, + true, cx, ) }) @@ -8452,7 +8559,7 @@ pub async fn get_any_active_multi_workspace( // find an existing workspace to focus and show call controls let active_window = activate_any_workspace_window(&mut cx); if active_window.is_none() { - cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx)) + cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, true, cx)) .await?; } activate_any_workspace_window(&mut cx).context("could not open zed") @@ -8840,6 +8947,7 @@ pub fn open_paths( open_options.replace_window, open_options.env, None, + true, cx, ) }) @@ -8903,6 +9011,7 @@ pub fn open_new( open_options.replace_window, open_options.env, Some(Box::new(init)), + true, cx, ); cx.spawn(async move |cx| { @@ -10054,6 +10163,87 @@ mod tests { assert!(!task.await.unwrap()); } + #[gpui::test] + async fn test_multi_workspace_close_window_multiple_workspaces_cancel(cx: &mut TestAppContext) { + 
init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({ "one": "" })).await; + + let project_a = Project::test(fs.clone(), ["root".as_ref()], cx).await; + let project_b = Project::test(fs, ["root".as_ref()], cx).await; + let multi_workspace_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + cx.run_until_parked(); + + let workspace_a = multi_workspace_handle + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + + let workspace_b = multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx) + }) + .unwrap(); + + // Activate workspace A + multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.activate_index(0, window, cx); + }) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace_handle.into(), cx); + + // Workspace A has a clean item + let item_a = cx.new(TestItem::new); + workspace_a.update_in(cx, |w, window, cx| { + w.add_item_to_active_pane(Box::new(item_a.clone()), None, true, window, cx) + }); + + // Workspace B has a dirty item + let item_b = cx.new(|cx| TestItem::new(cx).with_dirty(true)); + workspace_b.update_in(cx, |w, window, cx| { + w.add_item_to_active_pane(Box::new(item_b.clone()), None, true, window, cx) + }); + + // Verify workspace A is active + multi_workspace_handle + .read_with(cx, |mw, _| { + assert_eq!(mw.active_workspace_index(), 0); + }) + .unwrap(); + + // Dispatch CloseWindow — workspace A will pass, workspace B will prompt + multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.close_window(&CloseWindow, window, cx); + }) + .unwrap(); + cx.run_until_parked(); + + // Workspace B should now be active since it has dirty items that need attention + multi_workspace_handle + .read_with(cx, |mw, _| { + assert_eq!( + mw.active_workspace_index(), + 1, + "workspace B should be activated when it prompts" + ); + }) + .unwrap(); + + // User cancels the save prompt from workspace B + 
cx.simulate_prompt_answer("Cancel"); + cx.run_until_parked(); + + // Window should still exist because workspace B's close was cancelled + assert!( + multi_workspace_handle.update(cx, |_, _, _| ()).is_ok(), + "window should still exist after cancelling one workspace's close" + ); + } + #[gpui::test] async fn test_close_window_with_serializable_items(cx: &mut TestAppContext) { init_test(cx); @@ -10502,6 +10692,85 @@ mod tests { item.read_with(cx, |item, _| assert_eq!(item.save_count, 6)); } + #[gpui::test] + async fn test_autosave_on_focus_change_in_multibuffer(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + // Create a multibuffer-like item with two child focus handles, + // simulating individual buffer editors within a multibuffer. + let item = cx.new(|cx| { + TestItem::new(cx) + .with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + .with_child_focus_handles(2, cx) + }); + workspace.update_in(cx, |workspace, window, cx| { + workspace.add_item_to_active_pane(Box::new(item.clone()), None, true, window, cx); + }); + + // Set autosave to OnFocusChange and focus the first child handle, + // simulating the user's cursor being inside one of the multibuffer's excerpts. + item.update_in(cx, |item, window, cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings(cx, |settings| { + settings.workspace.autosave = Some(AutosaveSetting::OnFocusChange); + }) + }); + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 0)); + + // Moving focus from one child to another within the same item should + // NOT trigger autosave — focus is still within the item's focus hierarchy. 
+ item.update_in(cx, |item, window, cx| { + window.focus(&item.child_focus_handles[1], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 0, + "Switching focus between children within the same item should not autosave" + ); + }); + + // Blurring the item saves the file. This is the core regression scenario: + // with `on_blur`, this would NOT trigger because `on_blur` only fires when + // the item's own focus handle is the leaf that lost focus. In a multibuffer, + // the leaf is always a child focus handle, so `on_blur` never detected + // focus leaving the item. + item.update_in(cx, |_, window, _| window.blur()); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 1, + "Blurring should trigger autosave when focus was on a child of the item" + ); + }); + + // Deactivating the window should also trigger autosave when a child of + // the multibuffer item currently owns focus. + item.update_in(cx, |item, window, cx| { + item.is_dirty = true; + window.focus(&item.child_focus_handles[0], cx); + }); + cx.executor().run_until_parked(); + item.read_with(cx, |item, _| assert_eq!(item.save_count, 1)); + + cx.deactivate_window(); + item.read_with(cx, |item, _| { + assert_eq!( + item.save_count, 2, + "Deactivating window should trigger autosave when focus was on a child" + ); + }); + } + #[gpui::test] async fn test_pane_navigation(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -10564,8 +10833,9 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); workspace.update_in(cx, |workspace, window, cx| { let first_item = 
cx.new(|cx| { @@ -11059,8 +11329,9 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); // Open two docks (left and right) with one panel each let (left_panel, right_panel) = workspace.update_in(cx, |workspace, window, cx| { @@ -11491,8 +11762,9 @@ mod tests { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let (panel_1, panel_2) = workspace.update_in(cx, |workspace, window, cx| { let panel_1 = cx.new(|cx| TestPanel::new(DockPosition::Left, 100, cx)); @@ -12399,8 +12671,9 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); // Add a new panel to the right dock, opening the dock and setting the // focus to the new panel. 
@@ -13089,8 +13362,9 @@ mod tests { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; - let (workspace, cx) = - cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let panel = workspace.update_in(cx, |workspace, window, cx| { let panel = cx.new(|cx| TestPanel::new(DockPosition::Right, 100, cx)); workspace.add_panel(panel.clone(), window, cx); @@ -13154,6 +13428,7 @@ mod tests { let multi_workspace_handle = cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + cx.run_until_parked(); let workspace_a = multi_workspace_handle .read_with(cx, |mw, _| mw.workspace().clone()) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 69b0be24e7ffb09d3fe759ec0bd3d54b54db21d3..9e62beb3c375fb8d580be02382091cafe04d31e2 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2945,7 +2945,7 @@ impl BackgroundScannerState { self.snapshot.check_invariants(false); } - fn remove_path(&mut self, path: &RelPath) { + fn remove_path(&mut self, path: &RelPath, watcher: &dyn Watcher) { log::trace!("background scanner removing path {path:?}"); let mut new_entries; let removed_entries; @@ -2961,7 +2961,12 @@ impl BackgroundScannerState { self.snapshot.entries_by_path = new_entries; let mut removed_ids = Vec::with_capacity(removed_entries.summary().count); + let mut removed_dir_abs_paths = Vec::new(); for entry in removed_entries.cursor::<()>(()) { + if entry.is_dir() { + removed_dir_abs_paths.push(self.snapshot.absolutize(&entry.path)); + } + match self.removed_entries.entry(entry.inode) { hash_map::Entry::Occupied(mut e) => { let prev_removed_entry = e.get_mut(); @@ -2997,6 +3002,10 @@ impl BackgroundScannerState { .git_repositories .retain(|id, _| 
removed_ids.binary_search(id).is_err()); + for removed_dir_abs_path in removed_dir_abs_paths { + watcher.remove(&removed_dir_abs_path).log_err(); + } + #[cfg(feature = "test-support")] self.snapshot.check_invariants(false); } @@ -4461,7 +4470,10 @@ impl BackgroundScanner { if self.settings.is_path_excluded(&child_path) { log::debug!("skipping excluded child entry {child_path:?}"); - self.state.lock().await.remove_path(&child_path); + self.state + .lock() + .await + .remove_path(&child_path, self.watcher.as_ref()); continue; } @@ -4651,7 +4663,7 @@ impl BackgroundScanner { // detected regardless of the order of the paths. for (path, metadata) in relative_paths.iter().zip(metadata.iter()) { if matches!(metadata, Ok(None)) || doing_recursive_update { - state.remove_path(path); + state.remove_path(path, self.watcher.as_ref()); } } diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index 072a893a6a8f4fc7fbc8a6f4f5ed43316915b974..1abb2b53771fa1e29e2979560e9f394744b26158 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -165,6 +165,18 @@ impl Model { } } + pub fn requires_json_schema_subset(&self) -> bool { + match self { + Self::Grok4 + | Self::Grok4FastReasoning + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning + | Self::GrokCodeFast1 => true, + _ => false, + } + } + pub fn supports_prompt_cache_key(&self) -> bool { false } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 856c9b9dab4884773ec7d53dd210e81bbc4bedbf..5bec10439f75a4d3188ef977cf5f3e4c4733d8c6 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.226.0" +version = "0.228.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -17,7 +17,6 @@ test-support = [ "gpui/test-support", "gpui_platform/screen-capture", "dep:image", - "dep:semver", "workspace/test-support", "project/test-support", "editor/test-support", @@ -32,7 +31,6 @@ visual-tests = [ "gpui_platform/screen-capture", "gpui_platform/test-support", "dep:image", - "dep:semver", "dep:tempfile", "dep:action_log", "dep:agent_servers", @@ -50,7 +48,6 @@ visual-tests = [ "language_model/test-support", "fs/test-support", "recent_projects/test-support", - "sidebar/test-support", "title_bar/test-support", ] @@ -76,7 +73,6 @@ assets.workspace = true audio.workspace = true auto_update.workspace = true auto_update_ui.workspace = true -bincode.workspace = true breadcrumbs.workspace = true call.workspace = true chrono.workspace = true @@ -94,6 +90,7 @@ copilot.workspace = true copilot_chat.workspace = true copilot_ui.workspace = true crashes.workspace = true +csv_preview.workspace = true dap_adapters.workspace = true db.workspace = true debug_adapter_extension.workspace = true @@ -118,14 +115,10 @@ git_hosting_providers.workspace = true git_ui.workspace = true go_to_line.workspace = true system_specs.workspace = true -gpui = { workspace = true, features = [ - "wayland", - "windows-manifest", - "x11", -] } +gpui.workspace = true gpui_platform = {workspace = true, features=["screen-capture", "font-kit", "wayland", "x11"]} image = { workspace = true, optional = true } -semver = { workspace = true, optional = true } +semver.workspace = true tempfile = { workspace = true, optional = true } clock = { workspace = true, optional = true } acp_thread.workspace = true @@ -193,7 +186,6 @@ sidebar.workspace = true smol.workspace = true snippet_provider.workspace = true snippets_ui.workspace = true -supermaven.workspace = true svg_preview.workspace = true sysinfo.workspace = true 
tab_switcher.workspace = true @@ -232,13 +224,23 @@ zlog_settings.workspace = true [target.'cfg(target_os = "windows")'.dependencies] etw_tracing.workspace = true windows.workspace = true +gpui = { workspace = true, features = [ + "windows-manifest", +] } [target.'cfg(target_os = "windows")'.build-dependencies] winresource = "0.1" [target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] +gpui = { workspace = true, features = [ + "wayland", + "x11", +] } ashpd.workspace = true +[target.'cfg(target_os = "linux")'.build-dependencies] +pkg-config = "0.3.22" + [dev-dependencies] call = { workspace = true, features = ["test-support"] } dap = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 7e22752d35d9115efd735bfc7b2690f4bf5680d3..e169760acf16d6caa44aeb2004cd823a355f36ee 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -2,6 +2,25 @@ use std::process::Command; fn main() { + #[cfg(target_os = "linux")] + { + // Add rpaths for libraries that webrtc-sys dlopens at runtime. + // This is mostly required for hosts with non-standard SO installation + // locations such as NixOS. 
+ let dlopened_libs = ["libva", "libva-drm"]; + + let mut rpath_dirs = std::collections::BTreeSet::new(); + for lib in &dlopened_libs { + if let Some(libdir) = pkg_config::get_variable(lib, "libdir").ok() { + rpath_dirs.insert(libdir); + } + } + + for dir in &rpath_dirs { + println!("cargo:rustc-link-arg=-Wl,-rpath,{dir}"); + } + } + if cfg!(target_os = "macos") { println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7"); diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 95ff6f03b1b7902e254c5e405c5d8b50e1f48773..0d50339f6c9d42ffa653e5c7565ae6e22441bdca 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -335,7 +335,13 @@ fn main() { crashes::init( InitCrashHandler { session_id, - zed_version: app_version.to_string(), + // strip the build and channel information from the version string, we send them separately + zed_version: semver::Version::new( + app_version.major, + app_version.minor, + app_version.patch, + ) + .to_string(), binary: "zed".to_string(), release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), commit_sha: app_commit_sha @@ -573,6 +579,19 @@ fn main() { session.id().to_owned(), cx, ); + cx.subscribe(&user_store, { + let telemetry = telemetry.clone(); + move |_, evt: &client::user::Event, _| match evt { + client::user::Event::PrivateUserInfoUpdated => { + crashes::set_user_info(crashes::UserInfo { + metrics_id: telemetry.metrics_id().map(|s| s.to_string()), + is_staff: telemetry.is_staff(), + }); + } + _ => {} + } + }) + .detach(); // We should rename these in the future to `first app open`, `first app open for release channel`, and `app open` if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { @@ -638,7 +657,6 @@ fn main() { ); copilot_ui::init(&app_state, cx); - supermaven::init(app_state.client.clone(), cx); language_model::init(app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); @@ -646,7 
+664,7 @@ fn main() { zed::remote_debug::init(cx); edit_prediction_ui::init(cx); web_search::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); snippet_provider::init(cx); edit_prediction_registry::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), stdout_is_a_pty(), cx); @@ -716,6 +734,7 @@ fn main() { git_graph::init(cx); feedback::init(cx); markdown_preview::init(cx); + csv_preview::init(cx); svg_preview::init(cx); onboarding::init(cx); settings_ui::init(cx); @@ -951,7 +970,12 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut thread_store .update(&mut cx.clone(), |store, cx| { - store.save_thread(save_session_id.clone(), db_thread, cx) + store.save_thread( + save_session_id.clone(), + db_thread, + Default::default(), + cx, + ) }) .await?; @@ -1393,7 +1417,7 @@ pub(crate) async fn restore_or_create_workspace( .update(cx, |multi_workspace, _, cx| { multi_workspace.workspace().update(cx, |workspace, cx| { workspace.show_toast( - Toast::new(NotificationId::unique::<()>(), message), + Toast::new(NotificationId::unique::<()>(), message.clone()), cx, ) }); @@ -1405,11 +1429,23 @@ pub(crate) async fn restore_or_create_workspace( }); // If we couldn't show a toast (no windows opened successfully), - // we've already logged the errors above, so the user can check logs + // open a fallback empty workspace and show the error there if !toast_shown { - log::error!( - "Failed to show notification for window restoration errors, because no workspace windows were available." - ); + log::error!("All workspace restorations failed. 
Opening fallback empty workspace."); + cx.update(|cx| { + workspace::open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, _window, cx| { + workspace.show_toast( + Toast::new(NotificationId::unique::<()>(), message), + cx, + ); + }, + ) + }) + .await?; } } } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 84b52452919942b506c924743a6749d5af5c162a..2f284027929b19e5b0d5ac084267cf5548cda667 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -144,7 +144,7 @@ fn cleanup_old_hang_traces() { entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); @@ -175,7 +175,7 @@ fn save_hang_trace( .collect::>(); let trace_path = paths::hang_traces_dir().join(&format!( - "hang-{}.miniprof", + "hang-{}.miniprof.json", hang_time.format("%Y-%m-%d_%H-%M-%S") )); @@ -193,7 +193,7 @@ fn save_hang_trace( entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); @@ -288,16 +288,23 @@ async fn upload_minidump( form = form.text("minidump_error", minidump_error); } - if let Some(id) = client.telemetry().metrics_id() { - form = form.text("sentry[user][id]", id.to_string()); + if let Some(is_staff) = &metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.is_staff) + { form = form.text( "sentry[user][is_staff]", - if client.telemetry().is_staff().unwrap_or_default() { - "true" - } else { - "false" - }, + if *is_staff { "true" } else { "false" }, ); + } + + if let Some(metrics_id) = metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.metrics_id.as_ref()) + { + form = form.text("sentry[user][id]", metrics_id.clone()); } else if let Some(id) = client.telemetry().installation_id() { form = form.text("sentry[user][id]", format!("installation-{}", id)) } @@ -397,7 
+404,7 @@ struct BuildTiming { duration_ms: f32, first_crate: String, target: String, - lock_wait_ms: f32, + blocked_ms: f32, command: String, } @@ -452,7 +459,7 @@ async fn upload_build_timings(_client: Arc) -> Result<()> { duration_ms = timing.duration_ms, first_crate = timing.first_crate, target = timing.target, - lock_wait_ms = timing.lock_wait_ms, + blocked_ms = timing.blocked_ms, command = timing.command, cpu_count = cpu_count, ram_size_gb = ram_size_gb diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 6b1aca5939bf9a7874dd7a590d64a133f96a9dea..57d2f4462b959ebe31abd3a3ecec298977e0a877 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -42,6 +42,55 @@ fn main() { std::process::exit(1); } +#[cfg(target_os = "macos")] +fn main() { + // Set ZED_STATELESS early to prevent file system access to real config directories + // This must be done before any code accesses zed_env_vars::ZED_STATELESS + // SAFETY: We're at the start of main(), before any threads are spawned + unsafe { + std::env::set_var("ZED_STATELESS", "1"); + } + + env_logger::builder() + .filter_level(log::LevelFilter::Info) + .init(); + + let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); + + // Create a temporary directory for test files + // Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var) + // which prevents "path does not exist" errors during worktree scanning + // Use keep() to prevent auto-cleanup - background worktree tasks may still be running + // when tests complete, so we let the OS clean up temp directories on process exit + let temp_dir = tempfile::tempdir().expect("Failed to create temp directory"); + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path + .canonicalize() + .expect("Failed to canonicalize temp directory"); + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path).expect("Failed to create project 
directory"); + + // Create test files in the real filesystem + create_test_files(&project_path); + + let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); + + // Note: We don't delete temp_path here because background worktree tasks may still + // be running. The directory will be cleaned up when the process exits or by the OS. + + match test_result { + Ok(Ok(())) => {} + Ok(Err(e)) => { + eprintln!("Visual tests failed: {}", e); + std::process::exit(1); + } + Err(_) => { + eprintln!("Visual tests panicked"); + std::process::exit(1); + } + } +} + // All macOS-specific imports grouped together #[cfg(target_os = "macos")] use { @@ -50,7 +99,6 @@ use { agent_servers::{AgentServer, AgentServerDelegate}, anyhow::{Context as _, Result}, assets::Assets, - chrono::{Duration as ChronoDuration, Utc}, editor::display_map::DisplayRow, feature_flags::FeatureFlagAppExt as _, git_ui::project_diff::ProjectDiff, @@ -60,7 +108,6 @@ use { }, image::RgbaImage, project_panel::ProjectPanel, - recent_projects::RecentProjectEntry, settings::{NotifyWhenAgentWaiting, Settings as _}, settings_ui::SettingsWindow, std::{ @@ -71,7 +118,7 @@ use { time::Duration, }, util::ResultExt as _, - workspace::{AppState, MultiWorkspace, Workspace, WorkspaceId}, + workspace::{AppState, MultiWorkspace, Panel as _, Workspace}, zed_actions::OpenSettingsAt, }; @@ -97,55 +144,6 @@ mod constants { #[cfg(target_os = "macos")] use constants::*; -#[cfg(target_os = "macos")] -fn main() { - // Set ZED_STATELESS early to prevent file system access to real config directories - // This must be done before any code accesses zed_env_vars::ZED_STATELESS - // SAFETY: We're at the start of main(), before any threads are spawned - unsafe { - std::env::set_var("ZED_STATELESS", "1"); - } - - env_logger::builder() - .filter_level(log::LevelFilter::Info) - .init(); - - let update_baseline = std::env::var("UPDATE_BASELINE").is_ok(); - - // Create a temporary directory for test files - // 
Canonicalize the path to resolve symlinks (on macOS, /var -> /private/var) - // which prevents "path does not exist" errors during worktree scanning - // Use keep() to prevent auto-cleanup - background worktree tasks may still be running - // when tests complete, so we let the OS clean up temp directories on process exit - let temp_dir = tempfile::tempdir().expect("Failed to create temp directory"); - let temp_path = temp_dir.keep(); - let canonical_temp = temp_path - .canonicalize() - .expect("Failed to canonicalize temp directory"); - let project_path = canonical_temp.join("project"); - std::fs::create_dir_all(&project_path).expect("Failed to create project directory"); - - // Create test files in the real filesystem - create_test_files(&project_path); - - let test_result = std::panic::catch_unwind(|| run_visual_tests(project_path, update_baseline)); - - // Note: We don't delete temp_path here because background worktree tasks may still - // be running. The directory will be cleaned up when the process exits or by the OS. 
- - match test_result { - Ok(Ok(())) => {} - Ok(Err(e)) => { - eprintln!("Visual tests failed: {}", e); - std::process::exit(1); - } - Err(_) => { - eprintln!("Visual tests panicked"); - std::process::exit(1); - } - } -} - #[cfg(target_os = "macos")] fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> { // Create the visual test context with deterministic task scheduling @@ -548,6 +546,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> } } + // Run Test 11: Thread target selector visual tests + #[cfg(feature = "visual-tests")] + { + println!("\n--- Test 11: start_thread_in_selector (6 variants) ---"); + match run_start_thread_in_selector_visual_tests(app_state.clone(), &mut cx, update_baseline) + { + Ok(TestResult::Passed) => { + println!("✓ start_thread_in_selector: PASSED"); + passed += 1; + } + Ok(TestResult::BaselineUpdated(_)) => { + println!("✓ start_thread_in_selector: Baselines updated"); + updated += 1; + } + Err(e) => { + eprintln!("✗ start_thread_in_selector: FAILED - {}", e); + failed += 1; + } + } + } + // Run Test 9: Tool Permissions Settings UI visual test println!("\n--- Test 9: tool_permissions_settings ---"); match run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) { @@ -1945,11 +1964,10 @@ impl AgentServer for StubAgentServer { fn connect( &self, - _root_dir: Option<&Path>, _delegate: AgentServerDelegate, _cx: &mut App, - ) -> gpui::Task, Option)>> { - gpui::Task::ready(Ok((Rc::new(self.connection.clone()), None))) + ) -> gpui::Task>> { + gpui::Task::ready(Ok(Rc::new(self.connection.clone()))) } fn into_any(self: Rc) -> Rc { @@ -1963,7 +1981,7 @@ fn run_agent_thread_view_test( cx: &mut VisualTestAppContext, update_baseline: bool, ) -> Result { - use agent::AgentTool; + use agent::{AgentTool, ToolInput}; use agent_ui::AgentPanel; // Create a temporary directory with the test image @@ -2012,32 +2030,9 @@ fn run_agent_thread_view_test( // Create the necessary 
entities for the ReadFileTool let action_log = cx.update(|cx| cx.new(|_| action_log::ActionLog::new(project.clone()))); - let context_server_registry = cx.update(|cx| { - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)) - }); - let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default()); - let project_context = cx.update(|cx| cx.new(|_| prompt_store::ProjectContext::default())); - - // Create the agent Thread - let thread = cx.update(|cx| { - cx.new(|cx| { - agent::Thread::new( - project.clone(), - project_context, - context_server_registry, - agent::Templates::new(), - Some(fake_model), - cx, - ) - }) - }); // Create the ReadFileTool - let tool = Arc::new(agent::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(agent::ReadFileTool::new(project.clone(), action_log, true)); // Create a test event stream to capture tool output let (event_stream, mut event_receiver) = agent::ToolCallEventStream::test(); @@ -2048,7 +2043,10 @@ fn run_agent_thread_view_test( start_line: None, end_line: None, }; - let run_task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + let run_task = cx.update(|cx| { + tool.clone() + .run(ToolInput::resolved(input), event_stream, cx) + }); cx.background_executor.allow_parking(); let run_result = cx.foreground_executor.block_test(run_task); @@ -2528,16 +2526,6 @@ fn run_multi_workspace_sidebar_visual_tests( std::fs::create_dir_all(&workspace1_dir)?; std::fs::create_dir_all(&workspace2_dir)?; - // Create directories for recent projects (they must exist on disk for display) - let recent1_dir = canonical_temp.join("tiny-project"); - let recent2_dir = canonical_temp.join("font-kit"); - let recent3_dir = canonical_temp.join("ideas"); - let recent4_dir = canonical_temp.join("tmp"); - std::fs::create_dir_all(&recent1_dir)?; - std::fs::create_dir_all(&recent2_dir)?; - std::fs::create_dir_all(&recent3_dir)?; - 
std::fs::create_dir_all(&recent4_dir)?; - // Enable the agent-v2 feature flag so multi-workspace is active cx.update(|cx| { cx.update_flags(true, vec!["agent-v2".to_string()]); @@ -2677,83 +2665,78 @@ fn run_multi_workspace_sidebar_visual_tests( cx.run_until_parked(); - // Inject recent project entries into the sidebar. - // We update the sidebar entity directly (not through the MultiWorkspace window update) - // to avoid a re-entrant read panic: rebuild_entries reads MultiWorkspace, so we can't - // be inside a MultiWorkspace update when that happens. - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - let now = Utc::now(); - let today_timestamp = now; - let yesterday_timestamp = now - ChronoDuration::days(1); - let past_week_timestamp = now - ChronoDuration::days(10); - let all_timestamp = now - ChronoDuration::days(60); - - let recent_projects = vec![ - RecentProjectEntry { - name: "tiny-project".into(), - full_path: recent1_dir.to_string_lossy().to_string().into(), - paths: vec![recent1_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: today_timestamp, - }, - RecentProjectEntry { - name: "font-kit".into(), - full_path: recent2_dir.to_string_lossy().to_string().into(), - paths: vec![recent2_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: yesterday_timestamp, - }, - RecentProjectEntry { - name: "ideas".into(), - full_path: recent3_dir.to_string_lossy().to_string().into(), - paths: vec![recent3_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: past_week_timestamp, - }, - RecentProjectEntry { - name: "tmp".into(), - full_path: recent4_dir.to_string_lossy().to_string().into(), - paths: vec![recent4_dir.clone()], - workspace_id: WorkspaceId::default(), - timestamp: all_timestamp, - }, - ]; - sidebar.set_test_recent_projects(recent_projects, cx); - }); - }); - - // Set thread info directly on the sidebar for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, _cx| { - sidebar.set_test_thread_info( 
- 0, - "Refine thread view scrolling behavior".into(), - ui::AgentThreadStatus::Completed, - ); - sidebar.set_test_thread_info( - 1, - "Add line numbers option to FileEditBlock".into(), - ui::AgentThreadStatus::Running, - ); - }); - }); + // Save test threads to the ThreadStore for each workspace + let save_tasks = multi_workspace_window + .update(cx, |multi_workspace, _window, cx| { + let thread_store = agent::ThreadStore::global(cx); + let workspaces = multi_workspace.workspaces().to_vec(); + let mut tasks = Vec::new(); + + for (index, workspace) in workspaces.iter().enumerate() { + let workspace_ref = workspace.read(cx); + let mut paths = Vec::new(); + for worktree in workspace_ref.worktrees(cx) { + let worktree_ref = worktree.read(cx); + if worktree_ref.is_visible() { + paths.push(worktree_ref.abs_path().to_path_buf()); + } + } + let path_list = util::path_list::PathList::new(&paths); + + let (session_id, title, updated_at) = match index { + 0 => ( + "visual-test-thread-0", + "Refine thread view scrolling behavior", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 10, 30, 0) + .unwrap(), + ), + 1 => ( + "visual-test-thread-1", + "Add line numbers option to FileEditBlock", + chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 6, 15, 11, 0, 0) + .unwrap(), + ), + _ => continue, + }; + + let task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(session_id)), + agent::DbThread { + title: title.to_string().into(), + messages: Vec::new(), + updated_at, + detailed_summary: None, + initial_project_snapshot: None, + cumulative_token_usage: Default::default(), + request_token_usage: Default::default(), + model: None, + profile: None, + imported: false, + subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, + ui_scroll_position: None, + draft_prompt: None, + }, + path_list, + cx, + ) + }); + tasks.push(task); + } + tasks + }) + .context("Failed to create test threads")?; - 
// Set last-worked-on thread titles on some recent projects for visual testing - cx.update(|cx| { - sidebar.update(cx, |sidebar, cx| { - sidebar.set_test_recent_project_thread_title( - recent1_dir.to_string_lossy().to_string().into(), - "Fix flaky test in CI pipeline".into(), - cx, - ); - sidebar.set_test_recent_project_thread_title( - recent2_dir.to_string_lossy().to_string().into(), - "Upgrade font rendering engine".into(), - cx, - ); - }); - }); + cx.background_executor.allow_parking(); + for task in save_tasks { + cx.foreground_executor + .block_test(task) + .context("Failed to save test thread")?; + } + cx.background_executor.forbid_parking(); cx.run_until_parked(); @@ -2909,12 +2892,12 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { container() .child(ThreadItem::new("ti-none", "Default idle thread").timestamp("1:00 AM")), ) - .child(section_label("Blue dot (generation done)")) + .child(section_label("Blue dot (notified)")) .child( container().child( ThreadItem::new("ti-done", "Generation completed successfully") .timestamp("1:05 AM") - .generation_done(true), + .notified(true), ), ) .child(section_label("Yellow triangle (waiting for confirmation)")) @@ -2939,18 +2922,17 @@ impl gpui::Render for ThreadItemIconDecorationsTestView { ThreadItem::new("ti-running", "Generating response...") .icon(IconName::AiClaude) .timestamp("1:20 AM") - .running(true), + .status(ui::AgentThreadStatus::Running), ), ) .child(section_label( - "Spinner + yellow triangle (running + waiting)", + "Spinner + yellow triangle (waiting for confirmation)", )) .child( container().child( ThreadItem::new("ti-running-waiting", "Running but needs confirmation") .icon(IconName::AiClaude) .timestamp("1:25 AM") - .running(true) .status(ui::AgentThreadStatus::WaitingForConfirmation), ), ) @@ -3064,3 +3046,626 @@ fn run_error_wrapping_visual_tests( Ok(test_result) } + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +/// Runs a git command in the given directory and returns 
an error with +/// stderr/stdout context if the command fails (non-zero exit status). +fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> { + let output = std::process::Command::new("git") + .args(args) + .current_dir(dir) + .output() + .with_context(|| format!("failed to spawn `git {}`", args.join(" ")))?; + + if !output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!( + "`git {}` failed (exit {})\nstdout: {}\nstderr: {}", + args.join(" "), + output.status, + stdout.trim(), + stderr.trim(), + ); + } + Ok(()) +} + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +fn run_start_thread_in_selector_visual_tests( + app_state: Arc, + cx: &mut VisualTestAppContext, + update_baseline: bool, +) -> Result { + use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus}; + + // Enable feature flags so the thread target selector renders + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + // Create a temp directory with a real git repo so "New Worktree" is enabled + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path.canonicalize()?; + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path)?; + + // Initialize git repo + run_git_command(&["init"], &project_path)?; + run_git_command(&["config", "user.email", "test@test.com"], &project_path)?; + run_git_command(&["config", "user.name", "Test User"], &project_path)?; + + // Create source files + let src_dir = project_path.join("src"); + std::fs::create_dir_all(&src_dir)?; + std::fs::write( + src_dir.join("main.rs"), + r#"fn main() { + println!("Hello, world!"); + + let x = 42; + let y = x * 2; + + if y > 50 { + println!("y is greater than 50"); + } else { + println!("y is not greater than 50"); + } + + for i in 0..10 { + println!("i = {}", i); + } +} + +fn helper_function(a: i32, 
b: i32) -> i32 { + a + b +} +"#, + )?; + + std::fs::write( + project_path.join("Cargo.toml"), + r#"[package] +name = "test_project" +version = "0.1.0" +edition = "2021" +"#, + )?; + + // Commit so git status is clean + run_git_command(&["add", "."], &project_path)?; + run_git_command(&["commit", "-m", "Initial commit"], &project_path)?; + + let project = cx.update(|cx| { + project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + // Use a wide window so we see project panel + editor + agent panel + let window_size = size(px(1280.0), px(800.0)); + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size: window_size, + }; + + let workspace_window: WindowHandle = cx + .update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + |window, cx| { + let workspace = cx.new(|cx| { + Workspace::new(None, project.clone(), app_state.clone(), window, cx) + }); + cx.new(|cx| MultiWorkspace::new(workspace, window, cx)) + }, + ) + }) + .context("Failed to open thread target selector test window")?; + + cx.run_until_parked(); + + // Create and register the workspace sidebar + let sidebar = workspace_window + .update(cx, |_multi_workspace, window, cx| { + let multi_workspace_handle = cx.entity(); + cx.new(|cx| sidebar::Sidebar::new(multi_workspace_handle, window, cx)) + }) + .context("Failed to create sidebar")?; + + workspace_window + .update(cx, |multi_workspace, window, cx| { + multi_workspace.register_sidebar(sidebar.clone(), window, cx); + }) + .context("Failed to register sidebar")?; + + // Open the sidebar + workspace_window + .update(cx, |multi_workspace, window, cx| { + multi_workspace.toggle_sidebar(window, cx); + }) + 
.context("Failed to toggle sidebar")?; + + cx.run_until_parked(); + + // Add the git project as a worktree + let add_worktree_task = workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.find_or_create_worktree(&project_path, true, cx) + }) + }) + .context("Failed to start adding worktree")?; + + cx.background_executor.allow_parking(); + cx.foreground_executor + .block_test(add_worktree_task) + .context("Failed to add worktree")?; + cx.background_executor.forbid_parking(); + + cx.run_until_parked(); + + // Wait for worktree scan and git status + for _ in 0..5 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Open the project panel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle")?; + + cx.background_executor.allow_parking(); + let project_panel = cx + .foreground_executor + .block_test(ProjectPanel::load(weak_workspace, async_window_cx)) + .context("Failed to load project panel")?; + cx.background_executor.forbid_parking(); + + workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + workspace.add_panel(project_panel, window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add project panel")?; + + cx.run_until_parked(); + + // Open main.rs in the editor + let open_file_task = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + let worktree = workspace.project().read(cx).worktrees(cx).next(); + if let Some(worktree) = worktree { 
+ let worktree_id = worktree.read(cx).id(); + let rel_path: std::sync::Arc = + util::rel_path::rel_path("src/main.rs").into(); + let project_path: project::ProjectPath = (worktree_id, rel_path).into(); + Some(workspace.open_path(project_path, None, true, window, cx)) + } else { + None + } + }) + }) + .log_err() + .flatten(); + + if let Some(task) = open_file_task { + cx.background_executor.allow_parking(); + cx.foreground_executor.block_test(task).log_err(); + cx.background_executor.forbid_parking(); + } + + cx.run_until_parked(); + + // Load the AgentPanel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle for agent panel")?; + + let prompt_builder = + cx.update(|cx| prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx)); + + // Register an observer so that workspaces created by the worktree creation + // flow get AgentPanel and ProjectPanel loaded automatically. Without this, + // `workspace.panel::(cx)` returns None in the new workspace and + // the creation flow's `focus_panel::` call is a no-op. 
+ let _workspace_observer = cx.update({ + let prompt_builder = prompt_builder.clone(); + |cx| { + cx.observe_new(move |workspace: &mut Workspace, window, cx| { + let Some(window) = window else { return }; + let prompt_builder = prompt_builder.clone(); + let panels_task = cx.spawn_in(window, async move |workspace_handle, cx| { + let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); + let agent_panel = + AgentPanel::load(workspace_handle.clone(), prompt_builder, cx.clone()); + if let Ok(panel) = project_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + if let Ok(panel) = agent_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + anyhow::Ok(()) + }); + workspace.set_panels_task(panels_task); + }) + } + }); + + cx.background_executor.allow_parking(); + let panel = cx + .foreground_executor + .block_test(AgentPanel::load( + weak_workspace, + prompt_builder, + async_window_cx, + )) + .context("Failed to load AgentPanel")?; + cx.background_executor.forbid_parking(); + + workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + workspace.add_panel(panel.clone(), window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add and open AgentPanel")?; + + cx.run_until_parked(); + + // Inject the stub server and open a thread so the toolbar is visible + let connection = StubAgentConnection::new(); + let stub_agent: Rc = Rc::new(StubAgentServer::new(connection)); + + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + })?; + + cx.run_until_parked(); + + // ---- Screenshot 1: Default "Local Project" selector (dropdown closed) ---- + 
cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_default = run_visual_test( + "start_thread_in_selector_default", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 2: Dropdown open showing menu entries ---- + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_start_thread_in_menu_for_tests(window, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_open_dropdown = run_visual_test( + "start_thread_in_selector_open", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 3: "New Worktree" selected (dropdown closed, label changed) ---- + // First dismiss the dropdown, then change the target so the toolbar label is visible + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.close_start_thread_in_menu_for_tests(cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_new_worktree = run_visual_test( + "start_thread_in_selector_new_worktree", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 4: "Creating worktree…" status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel + .set_worktree_creation_status_for_tests(Some(WorktreeCreationStatus::Creating), cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + 
cx.run_until_parked(); + + let result_creating = run_visual_test( + "worktree_creation_status_creating", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 5: Error status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests( + Some(WorktreeCreationStatus::Error( + "Failed to create worktree: branch already exists".into(), + )), + cx, + ); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_error = run_visual_test( + "worktree_creation_status_error", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 6: Worktree creation succeeded ---- + // Clear the error status and re-select New Worktree to ensure a clean state. + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests(None, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, cx| { + window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + })?; + cx.run_until_parked(); + + // Insert a message into the active thread's message editor and submit. + let thread_view = cx + .read(|cx| panel.read(cx).as_active_thread_view(cx)) + .ok_or_else(|| anyhow::anyhow!("No active thread view"))?; + + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |message_editor, cx| { + message_editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + message_editor.send(cx); + }); + })?; + cx.run_until_parked(); + + // Wait for the full worktree creation flow to complete. 
The creation status + // is cleared to `None` at the very end of the async task, after panels are + // loaded, the agent panel is focused, and the new workspace is activated. + cx.background_executor.allow_parking(); + let mut creation_complete = false; + for _ in 0..120 { + cx.run_until_parked(); + let status_cleared = cx.read(|cx| { + panel + .read(cx) + .worktree_creation_status_for_tests() + .is_none() + }); + let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| { + multi_workspace.workspaces().len() + })?; + if workspace_count == 2 && status_cleared { + creation_complete = true; + break; + } + cx.advance_clock(Duration::from_millis(100)); + } + cx.background_executor.forbid_parking(); + + if !creation_complete { + return Err(anyhow::anyhow!("Worktree creation did not complete")); + } + + // The creation flow called `external_thread` on the new workspace's agent + // panel, which tried to launch a real agent binary and failed. Replace the + // error state by injecting the stub server, and shrink the panel so the + // editor content is visible. + workspace_window.update(cx, |multi_workspace, window, cx| { + let new_workspace = &multi_workspace.workspaces()[1]; + new_workspace.update(cx, |workspace, cx| { + if let Some(new_panel) = workspace.panel::(cx) { + new_panel.update(cx, |panel, cx| { + panel.set_size(Some(px(480.0)), window, cx); + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + } + }); + })?; + cx.run_until_parked(); + + // Type and send a message so the thread target dropdown disappears. 
+ let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| { + let new_workspace = &multi_workspace.workspaces()[1]; + new_workspace.read(cx).panel::(cx) + })?; + if let Some(new_panel) = new_panel { + let new_thread_view = cx.read(|cx| new_panel.read(cx).as_active_thread_view(cx)); + if let Some(new_thread_view) = new_thread_view { + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = new_thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |editor, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + editor.send(cx); + }); + })?; + cx.run_until_parked(); + } + } + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_succeeded = run_visual_test( + "worktree_creation_succeeded", + workspace_window.into(), + cx, + update_baseline, + ); + + // Clean up — drop the workspace observer first so no new panels are + // registered on workspaces created during teardown. + drop(_workspace_observer); + + workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + let worktree_ids: Vec<_> = + project.worktrees(cx).map(|wt| wt.read(cx).id()).collect(); + for id in worktree_ids { + project.remove_worktree(id, cx); + } + }); + }) + .log_err(); + + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.remove_window(); + }) + .log_err(); + + cx.run_until_parked(); + + for _ in 0..15 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Delete the preserved temp directory so visual-test runs don't + // accumulate filesystem artifacts. 
+ if let Err(err) = std::fs::remove_dir_all(&temp_path) { + log::warn!( + "failed to clean up visual-test temp dir {}: {err}", + temp_path.display() + ); + } + + // Reset feature flags + cx.update(|cx| { + cx.update_flags(false, vec![]); + }); + + let results = [ + ("default", result_default), + ("open_dropdown", result_open_dropdown), + ("new_worktree", result_new_worktree), + ("creating", result_creating), + ("error", result_error), + ("succeeded", result_succeeded), + ]; + + let mut has_baseline_update = None; + let mut failures = Vec::new(); + + for (name, result) in &results { + match result { + Ok(TestResult::Passed) => {} + Ok(TestResult::BaselineUpdated(p)) => { + has_baseline_update = Some(p.clone()); + } + Err(e) => { + failures.push(format!("{}: {}", name, e)); + } + } + } + + if !failures.is_empty() { + Err(anyhow::anyhow!( + "start_thread_in_selector failures: {}", + failures.join("; ") + )) + } else if let Some(p) = has_baseline_update { + Ok(TestResult::BaselineUpdated(p)) + } else { + Ok(TestResult::Passed) + } +} diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 83d504ea8f1cfbb13b5f0ea97cea6508a04126aa..c1e00b817abc8817cc81dc528c66901011f134aa 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -376,8 +376,19 @@ pub fn initialize_workspace( return; }; let multi_workspace_handle = cx.entity(); - let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle, window, cx)); + let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); multi_workspace.register_sidebar(sidebar, window, cx); + + let multi_workspace_handle = multi_workspace_handle.downgrade(); + window.on_window_should_close(cx, move |window, cx| { + multi_workspace_handle + .update(cx, |multi_workspace, cx| { + // We'll handle closing asynchronously + multi_workspace.close_window(&CloseWindow, window, cx); + false + }) + .unwrap_or(true) + }); }) .detach(); @@ -411,16 +422,7 @@ pub fn initialize_workspace( if let Some(specs) = 
window.gpu_specs() { log::info!("Using GPU: {:?}", specs); show_software_emulation_warning_if_needed(specs.clone(), window, cx); - if let Some((crash_server, message)) = crashes::CRASH_HANDLER - .get() - .zip(bincode::serialize(&specs).ok()) - && let Err(err) = crash_server.send_message(3, message) - { - log::warn!( - "Failed to store active gpu info for crash reporting: {}", - err - ); - } + crashes::set_gpu_info(specs); } let edit_prediction_menu_handle = PopoverMenuHandle::default(); @@ -485,18 +487,8 @@ pub fn initialize_workspace( status_bar.add_right_item(image_info, window, cx); }); - let handle = cx.entity().downgrade(); - window.on_window_should_close(cx, move |window, cx| { - handle - .update(cx, |workspace, cx| { - // We'll handle closing asynchronously - workspace.close_window(&CloseWindow, window, cx); - false - }) - .unwrap_or(true) - }); - - initialize_panels(prompt_builder.clone(), window, cx); + let panels_task = initialize_panels(prompt_builder.clone(), window, cx); + workspace.set_panels_task(panels_task); register_actions(app_state.clone(), workspace, window, cx); workspace.focus_handle(cx).focus(window, cx); @@ -620,7 +612,7 @@ fn initialize_panels( prompt_builder: Arc, window: &mut Window, cx: &mut Context, -) { +) -> Task> { cx.spawn_in(window, async move |workspace_handle, cx| { let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone()); @@ -662,7 +654,6 @@ fn initialize_panels( anyhow::Ok(()) }) - .detach(); } fn setup_or_teardown_ai_panel( @@ -794,7 +785,7 @@ fn register_actions( } } }) - .register_action(|workspace, _: &workspace::Open, window, cx| { + .register_action(|workspace, action: &workspace::Open, window, cx| { telemetry::event!("Project Opened"); workspace::prompt_for_open_path_and_open( workspace, @@ -805,6 +796,7 @@ fn register_actions( multiple: true, prompt: None, }, + action.create_new_window, window, cx, ); @@ -820,6 +812,7 @@ 
fn register_actions( multiple: true, prompt: None, }, + true, window, cx, ); @@ -1103,7 +1096,7 @@ fn register_actions( ); }, ) - .detach(); + .detach_and_log_err(cx); } } }) @@ -2060,40 +2053,39 @@ fn open_settings_file( cx: &mut Context, ) { cx.spawn_in(window, async move |workspace, cx| { - let settings_open_task = workspace + let (worktree_creation_task, settings_open_task) = workspace .update_in(cx, |workspace, window, cx| { - workspace.with_local_workspace(window, cx, move |_workspace, window, cx| { - cx.spawn_in(window, async move |workspace, cx| { - let worktree_creation_task = - workspace.update_in(cx, |workspace, _window, cx| { - workspace.project().update(cx, |project, cx| { - // Set up a dedicated worktree for settings, since - // otherwise we're dropping and re-starting LSP servers - // for each file inside on every settings file - // close/open - - // TODO: Do note that all other external files (e.g. - // drag and drop from OS) still have their worktrees - // released on file close, causing LSP servers' - // restarts. - project.find_or_create_worktree( - paths::config_dir().as_path(), - false, - cx, - ) - }) - })?; - let _ = worktree_creation_task.await?; - let settings_open_task = - workspace.update_in(cx, |_workspace, window, cx| { - create_and_open_local_file(abs_path, window, cx, default_content) - })?; - let _ = settings_open_task.await?; - anyhow::Ok(()) - }) + workspace.with_local_or_wsl_workspace(window, cx, move |workspace, window, cx| { + let project = workspace.project().clone(); + + let worktree_creation_task = cx.spawn_in(window, async move |_, cx| { + let config_dir = project + .update(cx, |project, cx| { + project.try_windows_path_to_wsl(paths::config_dir().as_path(), cx) + }) + .await?; + // Set up a dedicated worktree for settings, since + // otherwise we're dropping and re-starting LSP servers + // for each file inside on every settings file + // close/open + + // TODO: Do note that all other external files (e.g. 
+ // drag and drop from OS) still have their worktrees + // released on file close, causing LSP servers' + // restarts. + project + .update(cx, |project, cx| { + project.find_or_create_worktree(&config_dir, false, cx) + }) + .await + }); + let settings_open_task = + create_and_open_local_file(abs_path, window, cx, default_content); + (worktree_creation_task, settings_open_task) }) })? .await?; + let _ = worktree_creation_task.await?; let _ = settings_open_task.await?; anyhow::Ok(()) }) @@ -4793,6 +4785,7 @@ mod tests { "action", "activity_indicator", "agent", + "agents_sidebar", "app_menu", "assistant", "assistant2", @@ -4810,6 +4803,7 @@ mod tests { "console", "context_server", "copilot", + "csv", "debug_panel", "debugger", "dev", @@ -4861,7 +4855,6 @@ mod tests { "settings_profile_selector", "snippets", "stash_picker", - "supermaven", "svg", "syntax_tree_view", "tab_switcher", @@ -5022,7 +5015,7 @@ mod tests { language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx); project::AgentRegistryStore::init_global( cx, @@ -5809,7 +5802,15 @@ mod tests { // Window B: workspace for dir3 let (window_a, _) = cx .update(|cx| { - Workspace::new_local(vec![dir1.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir1.into()], + app_state.clone(), + None, + None, + None, + true, + cx, + ) }) .await .expect("failed to open first workspace"); @@ -5825,7 +5826,15 @@ mod tests { let (window_b, _) = cx .update(|cx| { - Workspace::new_local(vec![dir3.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir3.into()], + app_state.clone(), + None, + None, + None, + true, + cx, + ) }) .await .expect("failed to open third workspace"); diff --git 
a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 40582c8e13ff822189c9b3a1a467a9ff7f9d597a..f73d703557f8f73ad380c0b7a2cb995b29f92cf1 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -2,7 +2,7 @@ use collab_ui::collab_panel; use gpui::{App, Menu, MenuItem, OsAction}; use release_channel::ReleaseChannel; use terminal_view::terminal_panel; -use zed_actions::{ToggleFocus as ToggleDebugPanel, dev}; +use zed_actions::{debug_panel, dev}; pub fn app_menus(cx: &mut App) -> Vec { use zed_actions::Quit; @@ -43,7 +43,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Outline Panel", outline_panel::ToggleFocus), MenuItem::action("Collab Panel", collab_panel::ToggleFocus), MenuItem::action("Terminal Panel", terminal_panel::ToggleFocus), - MenuItem::action("Debugger Panel", ToggleDebugPanel), + MenuItem::action("Debugger Panel", debug_panel::ToggleFocus), MenuItem::separator(), MenuItem::action("Diagnostics", diagnostics::Deploy), MenuItem::separator(), @@ -125,7 +125,7 @@ pub fn app_menus(cx: &mut App) -> Vec { } else { "Open…" }, - workspace::Open, + workspace::Open::default(), ), MenuItem::action( "Open Recent...", diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 326ddef2d4b1e08b656a9381b1a632fbce1bdac3..9f05c5795e6f16cab231df8a5586106ed25b03ee 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -2,20 +2,18 @@ use client::{Client, UserStore}; use codestral::{CodestralEditPredictionDelegate, load_codestral_api_key}; use collections::HashMap; use copilot::CopilotEditPredictionDelegate; -use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate, Zeta2FeatureFlag}; +use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate}; use editor::Editor; -use feature_flags::FeatureFlagAppExt; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, 
WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; -use settings::{ - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, EditPredictionPromptFormat, SettingsStore, -}; +use settings::{EditPredictionPromptFormat, SettingsStore}; use std::{cell::RefCell, rc::Rc, sync::Arc}; -use supermaven::{Supermaven, SupermavenEditPredictionDelegate}; use ui::Window; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { + edit_prediction::EditPredictionStore::global(&client, &user_store, cx); + let editors: Rc, AnyWindowHandle>>> = Rc::default(); cx.observe_new({ let editors = editors.clone(); @@ -60,13 +58,13 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { cx.on_action(clear_edit_prediction_store_edit_history); - let mut provider_config = edit_prediction_provider_config_for_settings(cx); cx.subscribe(&user_store, { let editors = editors.clone(); let client = client.clone(); move |user_store, event, cx| { if let client::user::Event::PrivateUserInfoUpdated = event { + let provider_config = edit_prediction_provider_config_for_settings(cx); assign_edit_prediction_providers( &editors, provider_config, @@ -80,18 +78,18 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { .detach(); cx.observe_global::({ - let user_store = user_store.clone(); + let mut previous_config = edit_prediction_provider_config_for_settings(cx); move |cx| { let new_provider_config = edit_prediction_provider_config_for_settings(cx); - if new_provider_config != provider_config { + if new_provider_config != previous_config { telemetry::event!( "Edit Prediction Provider Changed", - from = provider_config.map(|config| config.name()), + from = previous_config.map(|config| config.name()), to = new_provider_config.map(|config| config.name()) ); - provider_config = new_provider_config; + previous_config = new_provider_config; assign_edit_prediction_providers( &editors, new_provider_config, @@ -111,10 +109,9 @@ fn 
edit_prediction_provider_config_for_settings(cx: &App) -> Option None, EditPredictionProvider::Copilot => Some(EditPredictionProviderConfig::Copilot), - EditPredictionProvider::Supermaven => Some(EditPredictionProviderConfig::Supermaven), - EditPredictionProvider::Zed => Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta1, - )), + EditPredictionProvider::Zed => { + Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta)) + } EditPredictionProvider::Codestral => Some(EditPredictionProviderConfig::Codestral), EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi => { let custom_settings = if provider == EditPredictionProvider::Ollama { @@ -133,10 +130,11 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Option Some(EditPredictionProviderConfig::Zed( EditPredictionModel::Mercury, )), - EditPredictionProvider::Experimental(name) => { - if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME - && cx.has_flag::() - { - Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta2, - )) - } else { - None - } - } + EditPredictionProvider::Experimental(_) => None, } } @@ -183,7 +171,6 @@ fn infer_prompt_format(model: &str) -> Option { #[derive(Copy, Clone, PartialEq, Eq)] enum EditPredictionProviderConfig { Copilot, - Supermaven, Codestral, Zed(EditPredictionModel), } @@ -192,11 +179,9 @@ impl EditPredictionProviderConfig { fn name(&self) -> &'static str { match self { EditPredictionProviderConfig::Copilot => "Copilot", - EditPredictionProviderConfig::Supermaven => "Supermaven", EditPredictionProviderConfig::Codestral => "Codestral", EditPredictionProviderConfig::Zed(model) => match model { - EditPredictionModel::Zeta1 => "Zeta1", - EditPredictionModel::Zeta2 => "Zeta2", + EditPredictionModel::Zeta => "Zeta", EditPredictionModel::Fim { .. 
} => "FIM", EditPredictionModel::Sweep => "Sweep", EditPredictionModel::Mercury => "Mercury", @@ -285,12 +270,6 @@ fn assign_edit_prediction_provider( editor.set_edit_prediction_provider(Some(provider), window, cx); } } - Some(EditPredictionProviderConfig::Supermaven) => { - if let Some(supermaven) = Supermaven::global(cx) { - let provider = cx.new(|_| SupermavenEditPredictionDelegate::new(supermaven)); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } - } Some(EditPredictionProviderConfig::Codestral) => { let http_client = client.http_client(); let provider = cx.new(|_| CodestralEditPredictionDelegate::new(http_client)); @@ -300,27 +279,122 @@ fn assign_edit_prediction_provider( let ep_store = edit_prediction::EditPredictionStore::global(client, &user_store, cx); if let Some(project) = editor.project() { - let has_model = ep_store.update(cx, |ep_store, cx| { + ep_store.update(cx, |ep_store, cx| { ep_store.set_edit_prediction_model(model); if let Some(buffer) = &singleton_buffer { ep_store.register_buffer(buffer, project, cx); } - true }); - if has_model { - let provider = cx.new(|cx| { - ZedEditPredictionDelegate::new( - project.clone(), - singleton_buffer, - &client, - &user_store, - cx, - ) - }); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } + let provider = cx.new(|cx| { + ZedEditPredictionDelegate::new( + project.clone(), + singleton_buffer, + &client, + &user_store, + cx, + ) + }); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } } } + +#[cfg(test)] +mod tests { + use super::*; + use editor::MultiBuffer; + use gpui::{BorrowAppContext, TestAppContext}; + use settings::{EditPredictionProvider, SettingsStore}; + use workspace::AppState; + + #[gpui::test] + async fn test_subscribe_uses_stale_provider_config_after_settings_change( + cx: &mut TestAppContext, + ) { + let app_state = cx.update(|cx| { + let app_state = AppState::test(cx); + client::init(&app_state.client, cx); + 
language_model::init(app_state.client.clone(), cx); + editor::init(cx); + app_state + }); + + // Override the default provider to None so the subscribe closure + // captures None at init time. (The test default is Zed/Zeta1, which + // is a no-op on project-less editors and would mask the bug.) + cx.update(|cx| { + cx.update_global::(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.edit_predictions = + Some(settings::EditPredictionSettingsContent { + provider: Some(EditPredictionProvider::None), + ..Default::default() + }); + }); + }); + }); + + cx.update(|cx| { + init(app_state.client.clone(), app_state.user_store.clone(), cx); + }); + + // Create an editor in a window so observe_new registers it. + let editor = cx.add_window(|window, cx| { + let buffer = cx.new(|_cx| MultiBuffer::new(language::Capability::ReadWrite)); + Editor::new(editor::EditorMode::full(), buffer, None, window, cx) + }); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_none(), + "editor should start with no provider when settings = None" + ); + }) + .unwrap(); + + // Change settings to Codestral. The observe_global closure updates its + // own copy of provider_config and assigns Codestral to all editors. + cx.update(|cx| { + cx.update_global::(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.edit_predictions = + Some(settings::EditPredictionSettingsContent { + provider: Some(EditPredictionProvider::Codestral), + ..Default::default() + }); + }); + }); + }); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_some(), + "editor should have a provider after changing settings to Codestral" + ); + }) + .unwrap(); + + // Emit PrivateUserInfoUpdated. 
The subscribe closure should use the + // CURRENT provider config (Codestral), but due to the bug it uses the + // stale init-time value (None) and clears the provider. + cx.update(|cx| { + app_state.user_store.update(cx, |_, cx| { + cx.emit(client::user::Event::PrivateUserInfoUpdated); + }); + }); + cx.run_until_parked(); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_some(), + "BUG: subscribe closure used stale provider_config (None) instead of current (Codestral)" + ); + }) + .unwrap(); + } +} diff --git a/crates/zed/src/zed/quick_action_bar/preview.rs b/crates/zed/src/zed/quick_action_bar/preview.rs index 5d43e79542357977b06fbbd884472f94ad3595c8..01e2d164d7d7a8a81e64ab77ad646111e4baacd7 100644 --- a/crates/zed/src/zed/quick_action_bar/preview.rs +++ b/crates/zed/src/zed/quick_action_bar/preview.rs @@ -1,3 +1,8 @@ +use csv_preview::{ + CsvPreviewView, OpenPreview as CsvOpenPreview, OpenPreviewToTheSide as CsvOpenPreviewToTheSide, + TabularDataPreviewFeatureFlag, +}; +use feature_flags::FeatureFlagAppExt as _; use gpui::{AnyElement, Modifiers, WeakEntity}; use markdown_preview::{ OpenPreview as MarkdownOpenPreview, OpenPreviewToTheSide as MarkdownOpenPreviewToTheSide, @@ -16,6 +21,7 @@ use super::QuickActionBar; enum PreviewType { Markdown, Svg, + Csv, } impl QuickActionBar { @@ -35,6 +41,10 @@ impl QuickActionBar { } else if SvgPreviewView::resolve_active_item_as_svg_buffer(workspace, cx).is_some() { preview_type = Some(PreviewType::Svg); + } else if cx.has_flag::() + && CsvPreviewView::resolve_active_item_as_csv_editor(workspace, cx).is_some() + { + preview_type = Some(PreviewType::Csv); } }); } @@ -57,6 +67,13 @@ impl QuickActionBar { Box::new(SvgOpenPreviewToTheSide) as Box, &svg_preview::OpenPreview as &dyn gpui::Action, ), + PreviewType::Csv => ( + "toggle-csv-preview", + "Preview CSV", + Box::new(CsvOpenPreview) as Box, + Box::new(CsvOpenPreviewToTheSide) as Box, + &csv_preview::OpenPreview as &dyn 
gpui::Action, + ), }; let alt_click = gpui::Keystroke { diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 848673b9377a4947053f0bb8d79de9863c58408c..ae785bb4a0c792dd7f55d8850e8c05ce6327c108 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -639,15 +639,19 @@ actions!( ] ); -actions!( - debug_panel, - [ - /// Toggles the debug panel. - Toggle, - /// Toggles focus on the debug panel. - ToggleFocus - ] -); +pub mod debug_panel { + use gpui::actions; + actions!( + debug_panel, + [ + /// Toggles the debug panel. + Toggle, + /// Toggles focus on the debug panel. + ToggleFocus + ] + ); +} + actions!( debugger, [ @@ -732,3 +736,17 @@ pub mod preview { ); } } + +pub mod notebook { + use gpui::actions; + + actions!( + notebook, + [ + /// Moves down in cells + NotebookMoveDown, + /// Moves up in cells + NotebookMoveUp, + ] + ); +} diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 7391683d34d8010336c6f81e6da50be6e6c11c15..668367727449c4fa3f9698746f3181d9bf3cca0a 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -18,17 +18,10 @@ fn estimate_tokens(bytes: usize) -> usize { bytes / 3 } -/// The client's preferred edit prediction model. The server may override this. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum EditPredictionModelKind { - Zeta1, - Zeta2, -} - /// Pre-computed byte offset ranges within `cursor_excerpt` for different /// editable and context token budgets. Allows the server to select the /// appropriate ranges for whichever model it uses. -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)] pub struct ExcerptRanges { /// Editable region computed with a 150-token budget.
pub editable_150: Range, @@ -36,37 +29,40 @@ pub struct ExcerptRanges { pub editable_180: Range, /// Editable region computed with a 350-token budget. pub editable_350: Range, + /// Editable region computed with a 512-token budget. + pub editable_512: Option>, /// Context boundary when using editable_150 with 350 tokens of additional context. pub editable_150_context_350: Range, /// Context boundary when using editable_180 with 350 tokens of additional context. pub editable_180_context_350: Range, /// Context boundary when using editable_350 with 150 tokens of additional context. pub editable_350_context_150: Range, + pub editable_350_context_512: Option>, + pub editable_350_context_1024: Option>, + pub context_4096: Option>, + pub context_8192: Option>, } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ZetaPromptInput { pub cursor_path: Arc, pub cursor_excerpt: Arc, - pub editable_range_in_excerpt: Range, pub cursor_offset_in_excerpt: usize, #[serde(default, skip_serializing_if = "Option::is_none")] pub excerpt_start_row: Option, pub events: Vec>, pub related_files: Vec, - /// When set, the excerpt was computed with a larger budget (~512 tokens) - /// and these ranges let the server select model-appropriate subsets. - /// When absent, the excerpt IS the context region and - /// `editable_range_in_excerpt` is the only editable range. + /// These ranges let the server select model-appropriate subsets. + pub excerpt_ranges: ExcerptRanges, + /// The name of the edit prediction model experiment to use. #[serde(default, skip_serializing_if = "Option::is_none")] - pub excerpt_ranges: Option, - /// Client's preferred model. The server may override.
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub preferred_model: Option, + pub experiment: Option, #[serde(default)] pub in_open_source_repo: bool, #[serde(default)] pub can_collect_data: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub repo_url: Option, } #[derive( @@ -92,6 +88,8 @@ pub enum ZetaFormat { V0131GitMergeMarkersPrefix, V0211Prefill, V0211SeedCoder, + v0226Hashline, + V0304SeedNoEdits, } impl std::fmt::Display for ZetaFormat { @@ -128,25 +126,6 @@ impl ZetaFormat { .collect::>() .concat() } - - pub fn special_tokens(&self) -> &'static [&'static str] { - match self { - ZetaFormat::V0112MiddleAtEnd - | ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion => &[ - "<|fim_prefix|>", - "<|fim_suffix|>", - "<|fim_middle|>", - "<|file_sep|>", - CURSOR_MARKER, - ], - ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), - ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { - v0131_git_merge_markers_prefix::special_tokens() - } - ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), - } - } } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] @@ -213,36 +192,35 @@ pub struct RelatedFile { pub struct RelatedExcerpt { pub row_range: Range, pub text: Arc, + #[serde(default)] + pub order: usize, } pub fn prompt_input_contains_special_tokens(input: &ZetaPromptInput, format: ZetaFormat) -> bool { - format - .special_tokens() + special_tokens_for_format(format) .iter() .any(|token| input.cursor_excerpt.contains(token)) } pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> String { - format_zeta_prompt_with_budget(input, format, MAX_PROMPT_TOKENS) + format_prompt_with_budget_for_format(input, format, MAX_PROMPT_TOKENS) } -/// Post-processes model output for the given zeta format by stripping format-specific suffixes. 
-pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { +pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { match format { - ZetaFormat::V0120GitMergeMarkers => output - .strip_suffix(v0120_git_merge_markers::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0131GitMergeMarkersPrefix => output - .strip_suffix(v0131_git_merge_markers_prefix::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0211SeedCoder => output - .strip_suffix(seed_coder::END_MARKER) - .unwrap_or(output), - _ => output, + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::special_tokens(), + ZetaFormat::V0113Ordered => v0113_ordered::special_tokens(), + ZetaFormat::V0114180EditableRegion => v0114180_editable_region::special_tokens(), + ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), + ZetaFormat::V0131GitMergeMarkersPrefix => v0131_git_merge_markers_prefix::special_tokens(), + ZetaFormat::V0211Prefill => v0211_prefill::special_tokens(), + ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), + ZetaFormat::v0226Hashline => hashline::special_tokens(), + ZetaFormat::V0304SeedNoEdits => seed_coder::special_tokens(), } } -pub fn excerpt_range_for_format( +pub fn excerpt_ranges_for_format( format: ZetaFormat, ranges: &ExcerptRanges, ) -> (Range, Range) { @@ -251,85 +229,93 @@ pub fn excerpt_range_for_format( ranges.editable_150.clone(), ranges.editable_150_context_350.clone(), ), - ZetaFormat::V0114180EditableRegion - | ZetaFormat::V0120GitMergeMarkers + ZetaFormat::V0114180EditableRegion => ( + ranges.editable_180.clone(), + ranges.editable_180_context_350.clone(), + ), + ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill - | ZetaFormat::V0211SeedCoder => ( + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits => ( ranges.editable_350.clone(), ranges.editable_350_context_150.clone(), ), } } -pub fn resolve_cursor_region( - 
input: &ZetaPromptInput, - format: ZetaFormat, -) -> (&str, Range, usize) { - let Some(ranges) = &input.excerpt_ranges else { - return ( - &input.cursor_excerpt, - input.editable_range_in_excerpt.clone(), - input.cursor_offset_in_excerpt, - ); - }; - - let (editable_range, context_range) = excerpt_range_for_format(format, ranges); - let context_start = context_range.start; - let context_text = &input.cursor_excerpt[context_range]; - let adjusted_editable = - (editable_range.start - context_start)..(editable_range.end - context_start); - let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; - - (context_text, adjusted_editable, adjusted_cursor) -} - -fn format_zeta_prompt_with_budget( - input: &ZetaPromptInput, +pub fn write_cursor_excerpt_section_for_format( format: ZetaFormat, - max_tokens: usize, -) -> String { - let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); - let path = &*input.cursor_path; - - let mut cursor_section = String::new(); + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) { match format { - ZetaFormat::V0112MiddleAtEnd => { - v0112_middle_at_end::write_cursor_excerpt_section( - &mut cursor_section, - path, - context, - &editable_range, - cursor_offset, - ); - } + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => { v0113_ordered::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ) } ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ), ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { 
v0131_git_merge_markers_prefix::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ) } - ZetaFormat::V0211SeedCoder => { - return seed_coder::format_prompt_with_budget( + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + seed_coder::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ) + } + ZetaFormat::v0226Hashline => hashline::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), + } +} + +pub fn format_prompt_with_budget_for_format( + input: &ZetaPromptInput, + format: ZetaFormat, + max_tokens: usize, +) -> String { + let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); + let path = &*input.cursor_path; + + match format { + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + seed_coder::format_prompt_with_budget( path, context, &editable_range, @@ -337,50 +323,179 @@ fn format_zeta_prompt_with_budget( &input.events, &input.related_files, max_tokens, - ); + ) } - } + _ => { + let mut cursor_section = String::new(); + write_cursor_excerpt_section_for_format( + format, + &mut cursor_section, + path, + context, + &editable_range, + cursor_offset, + ); - let cursor_tokens = estimate_tokens(cursor_section.len()); - let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens); + let cursor_tokens = estimate_tokens(cursor_section.len()); + let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens); - let edit_history_section = format_edit_history_within_budget( - &input.events, - "<|file_sep|>", - "edit history", - budget_after_cursor, - ); - let edit_history_tokens = estimate_tokens(edit_history_section.len()); - let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + 
budget_after_cursor, + ); + let edit_history_tokens = estimate_tokens(edit_history_section.len()); + let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); - let related_files_section = format_related_files_within_budget( - &input.related_files, - "<|file_sep|>", - budget_after_edit_history, - ); + let related_files_section = format_related_files_within_budget( + &input.related_files, + "<|file_sep|>", + "", + budget_after_edit_history, + ); - let mut prompt = String::new(); - prompt.push_str(&related_files_section); - prompt.push_str(&edit_history_section); - prompt.push_str(&cursor_section); - prompt + let mut prompt = String::new(); + prompt.push_str(&related_files_section); + prompt.push_str(&edit_history_section); + prompt.push_str(&cursor_section); + prompt + } + } } -pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { +pub fn get_prefill_for_format( + format: ZetaFormat, + context: &str, + editable_range: &Range, +) -> String { match format { + ZetaFormat::V0211Prefill => v0211_prefill::get_prefill(context, editable_range), ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion | ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211SeedCoder => String::new(), - ZetaFormat::V0211Prefill => { - let (context, editable_range, _) = resolve_cursor_region(input, format); - v0211_prefill::get_prefill(context, &editable_range) + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits => String::new(), + } +} + +pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> { + match format { + ZetaFormat::V0120GitMergeMarkers => Some(v0120_git_merge_markers::END_MARKER), + ZetaFormat::V0131GitMergeMarkersPrefix => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211Prefill => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211SeedCoder | 
ZetaFormat::V0304SeedNoEdits => Some(seed_coder::END_MARKER), + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::v0226Hashline => None, + } +} + +pub fn current_region_markers_for_format(format: ZetaFormat) -> (&'static str, &'static str) { + match format { + ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"), + ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::v0226Hashline => ("<|fim_middle|>current\n", "<|fim_suffix|>"), + ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill => ( + v0120_git_merge_markers::START_MARKER, + v0120_git_merge_markers::SEPARATOR, + ), + ZetaFormat::V0211SeedCoder | ZetaFormat::V0304SeedNoEdits => { + (seed_coder::START_MARKER, seed_coder::SEPARATOR) } } } +pub fn clean_extracted_region_for_format(format: ZetaFormat, region: &str) -> String { + match format { + ZetaFormat::v0226Hashline => hashline::strip_hashline_prefixes(region), + _ => region.to_string(), + } +} + +pub fn encode_patch_as_output_for_format( + format: ZetaFormat, + old_editable_region: &str, + patch: &str, + cursor_offset: Option, +) -> Result> { + match format { + ZetaFormat::v0226Hashline => { + hashline::patch_to_edit_commands(old_editable_region, patch, cursor_offset).map(Some) + } + ZetaFormat::V0304SeedNoEdits => Ok(seed_coder::no_edits(patch)), + _ => Ok(None), + } +} + +pub fn output_with_context_for_format( + format: ZetaFormat, + old_editable_region: &str, + output: &str, +) -> Result> { + match format { + ZetaFormat::v0226Hashline => { + if hashline::output_has_edit_commands(output) { + Ok(Some(hashline::apply_edit_commands( + old_editable_region, + output, + ))) + } else { + Ok(None) + } + } + ZetaFormat::V0304SeedNoEdits => { + if output.starts_with(seed_coder::NO_EDITS) { + Ok(Some(old_editable_region.to_owned())) + } else { + Ok(None) + } + } + _ => Ok(None), + } +} + +/// 
Post-processes model output for the given zeta format by stripping format-specific suffixes. +pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { + match output_end_marker_for_format(format) { + Some(marker) => output.strip_suffix(marker).unwrap_or(output), + None => output, + } +} + +pub fn excerpt_range_for_format( + format: ZetaFormat, + ranges: &ExcerptRanges, +) -> (Range, Range) { + excerpt_ranges_for_format(format, ranges) +} + +pub fn resolve_cursor_region( + input: &ZetaPromptInput, + format: ZetaFormat, +) -> (&str, Range, usize) { + let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges); + let context_start = context_range.start; + let context_text = &input.cursor_excerpt[context_range]; + let adjusted_editable = + (editable_range.start - context_start)..(editable_range.end - context_start); + let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; + + (context_text, adjusted_editable, adjusted_cursor) +} + +pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { + let (context, editable_range, _) = resolve_cursor_region(input, format); + get_prefill_for_format(format, context, &editable_range) +} + fn format_edit_history_within_budget( events: &[Arc], file_marker: &str, @@ -419,53 +534,98 @@ fn format_edit_history_within_budget( result } -fn format_related_files_within_budget( +fn excerpt_rendered_tokens(excerpt: &RelatedExcerpt, file_max_row: u32) -> usize { + let needs_newline = !excerpt.text.ends_with('\n'); + let needs_ellipsis = excerpt.row_range.end < file_max_row; + let len = excerpt.text.len() + + if needs_newline { "\n".len() } else { 0 } + + if needs_ellipsis { "...\n".len() } else { 0 }; + estimate_tokens(len) +} + +pub fn format_related_files_within_budget( related_files: &[RelatedFile], - file_marker: &str, + file_prefix: &str, + file_suffix: &str, max_tokens: usize, ) -> String { - let mut result = String::new(); - let mut total_tokens = 0; + 
struct ExcerptCandidate { + file_ix: usize, + excerpt_ix: usize, + order: usize, + } - for file in related_files { - let path_str = file.path.to_string_lossy(); - let header = format!("{}{}\n", file_marker, path_str); - let header_tokens = estimate_tokens(header.len()); + let mut excerpt_candidates: Vec = related_files + .iter() + .enumerate() + .flat_map(|(file_ix, file)| { + file.excerpts + .iter() + .enumerate() + .map(move |(excerpt_ix, e)| ExcerptCandidate { + file_ix, + excerpt_ix, + order: e.order, + }) + }) + .collect(); + + // Pre-compute file header strings and their token costs. + let file_headers: Vec = related_files + .iter() + .map(|file| { + let path_str = file.path.to_string_lossy(); + format!("{}{}\n", file_prefix, path_str) + }) + .collect(); - if total_tokens + header_tokens > max_tokens { + // Sort the excerpts by their order and determine how many fit within the budget. + let mut total_tokens = 0; + let mut included_excerpt_count = 0_usize; + let mut included_file_indices = vec![false; related_files.len()]; + excerpt_candidates.sort_by_key(|e| (e.order, e.file_ix, e.excerpt_ix)); + for candidate in &excerpt_candidates { + let file = &related_files[candidate.file_ix]; + let excerpt = &file.excerpts[candidate.excerpt_ix]; + let file_already_included = included_file_indices[candidate.file_ix]; + let header_cost = if file_already_included { + 0 + } else { + estimate_tokens(file_headers[candidate.file_ix].len() + file_suffix.len()) + }; + let excerpt_cost = excerpt_rendered_tokens(excerpt, file.max_row); + if total_tokens + header_cost + excerpt_cost > max_tokens { break; } + total_tokens += header_cost + excerpt_cost; + if !file_already_included { + included_file_indices[candidate.file_ix] = true; + } + included_excerpt_count += 1; + } - let mut file_tokens = header_tokens; - let mut excerpts_to_include = 0; + excerpt_candidates.truncate(included_excerpt_count); + excerpt_candidates.sort_unstable_by_key(|c| (c.file_ix, c.excerpt_ix)); - for excerpt 
in &file.excerpts { - let needs_newline = !excerpt.text.ends_with('\n'); - let needs_ellipsis = excerpt.row_range.end < file.max_row; - let excerpt_len = excerpt.text.len() - + if needs_newline { "\n".len() } else { 0 } - + if needs_ellipsis { "...\n".len() } else { 0 }; - - let excerpt_tokens = estimate_tokens(excerpt_len); - if total_tokens + file_tokens + excerpt_tokens > max_tokens { - break; + // Render all of the files that fit within the token budget, in the original order. + let mut result = String::new(); + let mut last_file_ix = None; + for candidate in &excerpt_candidates { + if last_file_ix != Some(candidate.file_ix) { + if last_file_ix.is_some() { + result.push_str(file_suffix); } - file_tokens += excerpt_tokens; - excerpts_to_include += 1; + result.push_str(&file_headers[candidate.file_ix]); + last_file_ix = Some(candidate.file_ix); } - - if excerpts_to_include > 0 { - total_tokens += file_tokens; - result.push_str(&header); - for excerpt in file.excerpts.iter().take(excerpts_to_include) { - result.push_str(&excerpt.text); - if !result.ends_with('\n') { - result.push('\n'); - } - if excerpt.row_range.end < file.max_row { - result.push_str("...\n"); - } - } + let file = &related_files[candidate.file_ix]; + let excerpt = &file.excerpts[candidate.excerpt_ix]; + result.push_str(&excerpt.text); + if !result.ends_with('\n') { + result.push('\n'); + } + if excerpt.row_range.end < file.max_row { + result.push_str("...\n"); } } @@ -499,6 +659,16 @@ pub fn write_related_files( mod v0112_middle_at_end { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -533,6 +703,16 @@ mod v0112_middle_at_end { mod v0113_ordered { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + 
"<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -567,6 +747,14 @@ mod v0113_ordered { } } +mod v0114180_editable_region { + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + v0113_ordered::special_tokens() + } +} + pub mod v0120_git_merge_markers { //! A prompt that uses git-style merge conflict markers to represent the editable region. //! @@ -665,86 +853,1497 @@ pub mod v0131_git_merge_markers_prefix { //! changes applied //! >>>>>>> UPDATED - use super::*; + use super::*; + + pub const START_MARKER: &str = "<<<<<<< CURRENT\n"; + pub const SEPARATOR: &str = "=======\n"; + pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + START_MARKER, + SEPARATOR, + END_MARKER, + CURSOR_MARKER, + ] + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str("<|fim_prefix|>"); + prompt.push_str(&context[..editable_range.start]); + prompt.push_str(START_MARKER); + prompt.push_str(&context[editable_range.start..cursor_offset]); + prompt.push_str(CURSOR_MARKER); + prompt.push_str(&context[cursor_offset..editable_range.end]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + prompt.push_str(SEPARATOR); + + prompt.push_str("<|fim_suffix|>"); + prompt.push_str(&context[editable_range.end..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str("<|fim_middle|>"); + } +} + +pub mod v0211_prefill { + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + v0131_git_merge_markers_prefix::special_tokens() + } + + pub fn get_prefill(context: &str, editable_range: &Range) -> String { + let editable_region = 
&context[editable_range.start..editable_range.end]; + + let prefill_len = (editable_region.len() as f64 * PREFILL_RATIO) as usize; + let prefill_len = editable_region.floor_char_boundary(prefill_len); + + // Find a token boundary to avoid splitting tokens in the prefill. + // In Qwen2.5-Coder, \n is always the END of a token (e.g. `;\n`, + // ` {\n`), and \n\n / \n\n\n are single tokens, so we must include + // the \n and consume any consecutive \n characters after it. + let prefill = &editable_region[..prefill_len]; + match prefill.rfind('\n') { + Some(pos) => { + let mut end = pos + 1; + while end < editable_region.len() + && editable_region.as_bytes().get(end) == Some(&b'\n') + { + end += 1; + } + editable_region[..end].to_string() + } + // No newline found. Fall back to splitting before the last space + // (word-level boundary) + None => match prefill.rfind(' ') { + Some(pos) => prefill[..pos].to_string(), + None => prefill.to_string(), + }, + } + } +} + +pub mod hashline { + + use std::fmt::Display; + + pub const END_MARKER: &str = "<|fim_middle|>updated"; + pub const START_MARKER: &str = "<|fim_middle|>current"; + + use super::*; + + const SET_COMMAND_MARKER: &str = "<|set|>"; + const INSERT_COMMAND_MARKER: &str = "<|insert|>"; + + pub fn special_tokens() -> &'static [&'static str] { + return &[ + SET_COMMAND_MARKER, + "<|set_range|>", + INSERT_COMMAND_MARKER, + CURSOR_MARKER, + "<|file_sep|>", + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + ]; + } + + /// A parsed line reference like `3:c3` (line index 3 with hash 0xc3). 
+ #[derive(Debug, Clone, PartialEq, Eq)] + struct LineRef { + index: usize, + hash: u8, + } + + impl Display for LineRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}:{:02x}", self.index, self.hash) + } + } + + pub fn hash_line(line: &[u8]) -> u8 { + let mut h: u8 = 0; + for &byte in line { + h = h.wrapping_add(byte); + } + return h; + } + + /// Write the hashline-encoded editable region into `out`. Each line of + /// `editable_text` is prefixed with `{line_index}:{hash}|` and the cursor + /// marker is inserted at `cursor_offset_in_editable` (byte offset relative + /// to the start of `editable_text`). + pub fn write_hashline_editable_region( + out: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + ) { + let mut offset = 0; + for (i, line) in editable_text.lines().enumerate() { + let (head, cursor, tail) = if cursor_offset_in_editable > offset + && cursor_offset_in_editable < offset + line.len() + { + ( + &line[..cursor_offset_in_editable - offset], + CURSOR_MARKER, + &line[cursor_offset_in_editable - offset..], + ) + } else { + (line, "", "") + }; + write!( + out, + "\n{}|{head}{cursor}{tail}", + LineRef { + index: i, + hash: hash_line(line.as_bytes()) + } + ) + .unwrap(); + offset += line.len() + 1; + } + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str("<|fim_prefix|>\n"); + prompt.push_str(&context[..editable_range.start]); + prompt.push_str(START_MARKER); + + let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range.start); + let editable_region = &context[editable_range.clone()]; + write_hashline_editable_region(prompt, editable_region, cursor_offset_in_editable); + + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + 
prompt.push_str("<|fim_suffix|>\n"); + prompt.push_str(&context[editable_range.end..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str(END_MARKER); + } + + /// A single edit command parsed from the model output. + #[derive(Debug)] + enum EditCommand<'a> { + /// Replace a range of lines (inclusive on both ends). Single-line set is + /// represented by `start == end`. + Set { + start: LineRef, + end: LineRef, + content: &'a str, + }, + /// Insert new lines after the given line, or before the first line if + /// `after` is `None`. + Insert { + after: Option, + content: &'a str, + }, + } + + /// Parse a line reference like `3:c3` into a `LineRef`. + fn parse_line_ref(s: &str) -> Option { + let (idx_str, hash_str) = s.split_once(':')?; + let index = idx_str.parse::().ok()?; + let hash = u8::from_str_radix(hash_str, 16).ok()?; + Some(LineRef { index, hash }) + } + + /// Parse the model output into a list of `EditCommand`s. + fn parse_edit_commands(model_output: &str) -> Vec> { + let mut commands = Vec::new(); + let mut offset = 0usize; + + while offset < model_output.len() { + let next_nl = model_output[offset..] + .find('\n') + .map(|i| offset + i) + .unwrap_or(model_output.len()); + let line = &model_output[offset..next_nl]; + let line_end = if next_nl < model_output.len() { + next_nl + 1 + } else { + next_nl + }; + + let trimmed = line.trim(); + let (is_set, specifier) = if let Some(spec) = trimmed.strip_prefix(SET_COMMAND_MARKER) { + (true, spec) + } else if let Some(spec) = trimmed.strip_prefix(INSERT_COMMAND_MARKER) { + (false, spec) + } else { + offset = line_end; + continue; + }; + + let mut content_end = line_end; + let mut scan = line_end; + + while scan < model_output.len() { + let body_nl = model_output[scan..] 
+ .find('\n') + .map(|i| scan + i) + .unwrap_or(model_output.len()); + let body_line = &model_output[scan..body_nl]; + if body_line.trim().starts_with(SET_COMMAND_MARKER) + || body_line.trim().starts_with(INSERT_COMMAND_MARKER) + { + break; + } + scan = if body_nl < model_output.len() { + body_nl + 1 + } else { + body_nl + }; + content_end = scan; + } + + let content = &model_output[line_end..content_end]; + + if is_set { + if let Some((start_str, end_str)) = specifier.split_once('-') { + if let (Some(start), Some(end)) = + (parse_line_ref(start_str), parse_line_ref(end_str)) + { + commands.push(EditCommand::Set { + start, + end, + content, + }); + } + } else if let Some(target) = parse_line_ref(specifier) { + commands.push(EditCommand::Set { + start: target.clone(), + end: target, + content, + }); + } + } else { + let after = parse_line_ref(specifier); + commands.push(EditCommand::Insert { after, content }); + } + + offset = scan; + } + + commands + } + + /// Returns `true` if the model output contains `<|set|>` or `<|insert|>` commands + /// (as opposed to being a plain full-replacement output). + /// Strip the `{line_num}:{hash}|` prefixes from each line of a hashline-encoded + /// editable region, returning the plain text content. + pub fn strip_hashline_prefixes(region: &str) -> String { + let mut decoded: String = region + .lines() + .map(|line| line.find('|').map_or(line, |pos| &line[pos + 1..])) + .collect::>() + .join("\n"); + if region.ends_with('\n') { + decoded.push('\n'); + } + decoded + } + + pub fn output_has_edit_commands(model_output: &str) -> bool { + model_output.contains(SET_COMMAND_MARKER) || model_output.contains(INSERT_COMMAND_MARKER) + } + + /// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the + /// original editable region text. + /// + /// `editable_region` is the original text of the editable region (without hash + /// prefixes). `model_output` is the raw model response containing edit commands. 
+ /// + /// Returns the full replacement text for the editable region. + pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String { + let original_lines: Vec<&str> = editable_region.lines().collect(); + let old_hashes: Vec = original_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let commands = parse_edit_commands(model_output); + + // For set operations: indexed by start line → Some((end line index, content)) + // For insert operations: indexed by line index → vec of content to insert after + // Insert-before-first is tracked separately. + let mut set_ops: Vec> = vec![None; original_lines.len()]; + let mut insert_before_first: Vec<&str> = Vec::new(); + let mut insert_after: Vec> = vec![Vec::new(); original_lines.len()]; + + for command in &commands { + match command { + EditCommand::Set { + start, + end, + content, + } => { + if start.index < old_hashes.len() + && end.index < old_hashes.len() + && start.index <= end.index + && old_hashes[start.index] == start.hash + && old_hashes[end.index] == end.hash + { + set_ops[start.index] = Some((end.index, *content)); + } + } + EditCommand::Insert { after, content } => match after { + None => insert_before_first.push(*content), + Some(line_ref) => { + if line_ref.index < old_hashes.len() + && old_hashes[line_ref.index] == line_ref.hash + { + insert_after[line_ref.index].push(*content); + } + } + }, + } + } + + let mut result = String::new(); + + // Emit any insertions before the first line + for content in &insert_before_first { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + + let mut i = 0; + while i < original_lines.len() { + if let Some((end_index, replacement)) = set_ops[i].as_ref() { + // Replace lines i..=end_index with the replacement content + result.push_str(replacement); + if !replacement.is_empty() && !replacement.ends_with('\n') { + result.push('\n'); + } + // Emit any insertions after the end of this set range + if 
*end_index < insert_after.len() { + for content in &insert_after[*end_index] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + } + i = end_index + 1; + } else { + // Keep the original line + result.push_str(original_lines[i]); + result.push('\n'); + // Emit any insertions after this line + for content in &insert_after[i] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + i += 1; + } + } + + // Preserve trailing newline behavior: if the original ended with a + // newline the result already has one; if it didn't, trim the extra one + // we added. + if !editable_region.ends_with('\n') && result.ends_with('\n') { + result.pop(); + } + + result + } + + /// Convert a unified diff patch into hashline edit commands. + /// + /// Parses the unified diff `patch` directly to determine which lines of + /// `old_text` are deleted/replaced and what new lines are added, then emits + /// `<|set|>` and `<|insert|>` edit commands referencing old lines by their + /// `{index}:{hash}` identifiers. + /// + /// `cursor_offset` is an optional byte offset into the first hunk's new + /// text (context + additions) where the cursor marker should be placed. + pub fn patch_to_edit_commands( + old_text: &str, + patch: &str, + cursor_offset: Option, + ) -> Result { + let old_lines: Vec<&str> = old_text.lines().collect(); + let old_hashes: Vec = old_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let mut result = String::new(); + let mut first_hunk = true; + + struct Hunk<'a> { + line_range: Range, + new_text_lines: Vec<&'a str>, + cursor_line_offset_in_new_text: Option<(usize, usize)>, + } + + // Parse the patch line by line. We only care about hunk headers, + // context, deletions, and additions. + let mut old_line_index: usize = 0; + let mut current_hunk: Option = None; + // Byte offset tracking within the hunk's new text for cursor placement. 
+ let mut new_text_byte_offset: usize = 0; + // The line index of the last old line seen before/in the current hunk + // (used for insert-after reference). + let mut last_old_line_before_hunk: Option = None; + + fn flush_hunk( + hunk: Hunk, + last_old_line: Option, + result: &mut String, + old_hashes: &[u8], + ) { + if hunk.line_range.is_empty() { + // Pure insertion — reference the old line to insert after when in bounds. + if let Some(after) = last_old_line + && let Some(&hash) = old_hashes.get(after) + { + write!( + result, + "{INSERT_COMMAND_MARKER}{}\n", + LineRef { index: after, hash } + ) + .unwrap(); + } else { + result.push_str(INSERT_COMMAND_MARKER); + result.push('\n'); + } + } else { + let start = hunk.line_range.start; + let end_exclusive = hunk.line_range.end; + let deleted_line_count = end_exclusive.saturating_sub(start); + + if deleted_line_count == 1 { + if let Some(&hash) = old_hashes.get(start) { + write!( + result, + "{SET_COMMAND_MARKER}{}\n", + LineRef { index: start, hash } + ) + .unwrap(); + } else { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } else { + let end_inclusive = end_exclusive - 1; + match ( + old_hashes.get(start).copied(), + old_hashes.get(end_inclusive).copied(), + ) { + (Some(start_hash), Some(end_hash)) => { + write!( + result, + "{SET_COMMAND_MARKER}{}-{}\n", + LineRef { + index: start, + hash: start_hash + }, + LineRef { + index: end_inclusive, + hash: end_hash + } + ) + .unwrap(); + } + _ => { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } + } + } + for (line_offset, line) in hunk.new_text_lines.iter().enumerate() { + if let Some((cursor_line_offset, char_offset)) = hunk.cursor_line_offset_in_new_text + && line_offset == cursor_line_offset + { + result.push_str(&line[..char_offset]); + result.push_str(CURSOR_MARKER); + result.push_str(&line[char_offset..]); + continue; + } + + result.push_str(line); + } + } + + for raw_line in patch.split_inclusive('\n') { + if 
raw_line.starts_with("@@") { + // Flush any pending change hunk from a previous patch hunk. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Parse hunk header: @@ -old_start[,old_count] +new_start[,new_count] @@ + // We intentionally do not trust old_start as a direct local index into `old_text`, + // because some patches are produced against a larger file region and carry + // non-local line numbers. We keep indexing local by advancing from parsed patch lines. + if first_hunk { + new_text_byte_offset = 0; + first_hunk = false; + } + continue; + } + + if raw_line.starts_with("---") || raw_line.starts_with("+++") { + continue; + } + if raw_line.starts_with("\\ No newline") { + continue; + } - pub const START_MARKER: &str = "<<<<<<< CURRENT\n"; - pub const SEPARATOR: &str = "=======\n"; - pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + if raw_line.starts_with('-') { + // Extend or start a change hunk with this deleted old line. + match &mut current_hunk { + Some(Hunk { + line_range: range, .. + }) => range.end = old_line_index + 1, + None => { + current_hunk = Some(Hunk { + line_range: old_line_index..old_line_index + 1, + new_text_lines: Vec::new(), + cursor_line_offset_in_new_text: None, + }); + } + } + old_line_index += 1; + } else if let Some(added_content) = raw_line.strip_prefix('+') { + // Place cursor marker if cursor_offset falls within this line. 
+ let mut cursor_line_offset = None; + if let Some(cursor_off) = cursor_offset + && (first_hunk + || cursor_off >= new_text_byte_offset + && cursor_off <= new_text_byte_offset + added_content.len()) + { + let line_offset = added_content.floor_char_boundary( + cursor_off + .saturating_sub(new_text_byte_offset) + .min(added_content.len()), + ); + cursor_line_offset = Some(line_offset); + } - pub fn special_tokens() -> &'static [&'static str] { - &[ - "<|fim_prefix|>", - "<|fim_suffix|>", - "<|fim_middle|>", - "<|file_sep|>", - START_MARKER, - SEPARATOR, - END_MARKER, - CURSOR_MARKER, - ] + new_text_byte_offset += added_content.len(); + + let hunk = current_hunk.get_or_insert(Hunk { + line_range: old_line_index..old_line_index, + new_text_lines: vec![], + cursor_line_offset_in_new_text: None, + }); + hunk.new_text_lines.push(added_content); + hunk.cursor_line_offset_in_new_text = cursor_line_offset + .map(|offset_in_line| (hunk.new_text_lines.len() - 1, offset_in_line)); + } else { + // Context line (starts with ' ' or is empty). + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + last_old_line_before_hunk = Some(old_line_index); + old_line_index += 1; + let content = raw_line.strip_prefix(' ').unwrap_or(raw_line); + new_text_byte_offset += content.len(); + } + } + + // Flush final group. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Trim a single trailing newline. 
+ if result.ends_with('\n') { + result.pop(); + } + + Ok(result) } - pub fn write_cursor_excerpt_section( - prompt: &mut String, - path: &Path, - context: &str, - editable_range: &Range, - cursor_offset: usize, - ) { - let path_str = path.to_string_lossy(); - write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + #[cfg(test)] + mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn test_format_cursor_region() { + struct Case { + name: &'static str, + context: &'static str, + editable_range: Range, + cursor_offset: usize, + expected: &'static str, + } - prompt.push_str("<|fim_prefix|>"); - prompt.push_str(&context[..editable_range.start]); - prompt.push_str(START_MARKER); - prompt.push_str(&context[editable_range.start..cursor_offset]); - prompt.push_str(CURSOR_MARKER); - prompt.push_str(&context[cursor_offset..editable_range.end]); - if !prompt.ends_with('\n') { - prompt.push('\n'); + let cases = [ + Case { + name: "basic_cursor_placement", + context: "hello world\n", + editable_range: 0..12, + cursor_offset: 5, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:5c|hello<|user_cursor|> world + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "multiline_cursor_on_second_line", + context: "aaa\nbbb\nccc\n", + editable_range: 0..12, + cursor_offset: 5, // byte 5 → 1 byte into "bbb" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + 2:29|ccc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "no_trailing_newline_in_context", + context: "line1\nline2", + editable_range: 0..11, + cursor_offset: 3, + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:d9|lin<|user_cursor|>e1 + 1:da|line2 + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "leading_newline_in_editable_region", + context: "\nabc\n", + editable_range: 0..5, + cursor_offset: 2, // byte 2 = 'a' in "abc" (after leading \n) + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:00| + 1:26|a<|user_cursor|>bc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "with_suffix", + context: "abc\ndef", + editable_range: 0..4, // editable region = "abc\n", suffix = "def" + cursor_offset: 2, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|ab<|user_cursor|>c + <|fim_suffix|> + def + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_two_byte_chars", + context: "héllo\n", + editable_range: 0..7, + cursor_offset: 3, // byte 3 = after "hé" (h=1 byte, é=2 bytes), before "llo" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:1b|hé<|user_cursor|>llo + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_three_byte_chars", + context: "日本語\n", + editable_range: 0..10, + cursor_offset: 6, // byte 6 = after "日本" (3+3 bytes), before "語" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:80|日本<|user_cursor|>語 + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_four_byte_chars", + context: "a🌍b\n", + editable_range: 0..7, + cursor_offset: 5, // byte 5 = after "a🌍" (1+4 bytes), before "b" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:6b|a🌍<|user_cursor|>b + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_at_start_of_region_not_placed", + context: "abc\n", + editable_range: 0..4, + cursor_offset: 0, // cursor_offset(0) > offset(0) is false → cursor not placed + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_at_end_of_line_not_placed", + context: "abc\ndef\n", + editable_range: 0..8, + cursor_offset: 3, // byte 3 = the \n after "abc" → falls between lines, not placed + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + 1:2f|def + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_offset_relative_to_context_not_editable_region", + // cursor_offset is relative to `context`, so when editable_range.start > 0, + // write_cursor_excerpt_section must subtract it before comparing against + // per-line offsets within the editable region. + context: "pre\naaa\nbbb\nsuf\n", + editable_range: 4..12, // editable region = "aaa\nbbb\n" + cursor_offset: 9, // byte 9 in context = second 'b' in "bbb" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + pre + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + <|fim_suffix|> + suf + <|fim_middle|>updated"}, + }, + ]; + + for case in &cases { + let mut prompt = String::new(); + hashline::write_cursor_excerpt_section( + &mut prompt, + Path::new("test.rs"), + case.context, + &case.editable_range, + case.cursor_offset, + ); + assert_eq!(prompt, case.expected, "failed case: {}", case.name); + } } - prompt.push_str(SEPARATOR); - prompt.push_str("<|fim_suffix|>"); - prompt.push_str(&context[editable_range.end..]); - if !prompt.ends_with('\n') { - prompt.push('\n'); + #[test] + fn test_apply_edit_commands() { + struct Case { + name: &'static str, + original: &'static str, + model_output: &'static str, + expected: &'static str, + } + + let cases = vec![ + Case { + name: "set_single_line", + original: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + model_output: indoc! {" + <|set|>2:87 + total += product.price; + "}, + expected: indoc! 
{" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "set_range", + original: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + model_output: indoc! {" + <|set|>1:46-3:4a + let sum = 6; + "}, + expected: indoc! {" + fn foo() { + let sum = 6; + } + "}, + }, + Case { + name: "insert_after_line", + original: indoc! {" + fn main() { + let x = 1; + } + "}, + model_output: indoc! {" + <|insert|>1:46 + let y = 2; + "}, + expected: indoc! {" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insert_before_first", + original: indoc! {" + let x = 1; + let y = 2; + "}, + model_output: indoc! {" + <|insert|> + use std::io; + "}, + expected: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "set_with_cursor_marker", + original: indoc! {" + fn main() { + println!(); + } + "}, + model_output: indoc! {" + <|set|>1:34 + eprintln!(\"<|user_cursor|>\"); + "}, + expected: indoc! {" + fn main() { + eprintln!(\"<|user_cursor|>\"); + } + "}, + }, + Case { + name: "multiple_set_commands", + original: indoc! {" + aaa + bbb + ccc + ddd + "}, + model_output: indoc! {" + <|set|>0:23 + AAA + <|set|>2:29 + CCC + "}, + expected: indoc! {" + AAA + bbb + CCC + ddd + "}, + }, + Case { + name: "set_range_multiline_replacement", + original: indoc! {" + fn handle_submit() { + } + + fn handle_keystroke() { + "}, + model_output: indoc! {" + <|set|>0:3f-1:7d + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + "}, + expected: indoc! {" + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "no_edit_commands_returns_original", + original: indoc! {" + hello + world + "}, + model_output: "some random text with no commands", + expected: indoc! {" + hello + world + "}, + }, + Case { + name: "wrong_hash_set_ignored", + original: indoc! {" + aaa + bbb + "}, + model_output: indoc! 
{" + <|set|>0:ff + ZZZ + "}, + expected: indoc! {" + aaa + bbb + "}, + }, + Case { + name: "insert_and_set_combined", + original: indoc! {" + alpha + beta + gamma + "}, + model_output: indoc! {" + <|set|>0:06 + ALPHA + <|insert|>1:9c + beta_extra + "}, + expected: indoc! {" + ALPHA + beta + beta_extra + gamma + "}, + }, + Case { + name: "no_trailing_newline_preserved", + original: "hello\nworld", + model_output: indoc! {" + <|set|>0:14 + HELLO + "}, + expected: "HELLO\nworld", + }, + Case { + name: "set_range_hash_mismatch_in_end_bound", + original: indoc! {" + one + two + three + "}, + model_output: indoc! {" + <|set|>0:42-2:ff + ONE_TWO_THREE + "}, + expected: indoc! {" + one + two + three + "}, + }, + Case { + name: "set_range_start_greater_than_end_ignored", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + <|set|>2:63-1:62 + X + "}, + expected: indoc! {" + a + b + c + "}, + }, + Case { + name: "insert_out_of_bounds_ignored", + original: indoc! {" + x + y + "}, + model_output: indoc! {" + <|insert|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "set_out_of_bounds_ignored", + original: indoc! {" + x + y + "}, + model_output: indoc! {" + <|set|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "malformed_set_command_ignored", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|set|>not-a-line-ref + UPDATED + "}, + expected: indoc! {" + alpha + beta + "}, + }, + Case { + name: "malformed_insert_hash_treated_as_before_first", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|insert|>1:nothex + preamble + "}, + expected: indoc! {" + preamble + alpha + beta + "}, + }, + Case { + name: "set_then_insert_same_target_orders_insert_after_replacement", + original: indoc! {" + cat + dog + "}, + model_output: indoc! {" + <|set|>0:38 + CAT + <|insert|>0:38 + TAIL + "}, + expected: indoc! 
{" + CAT + TAIL + dog + "}, + }, + Case { + name: "overlapping_set_ranges_last_wins", + original: indoc! {" + a + b + c + d + "}, + model_output: indoc! {" + <|set|>0:61-2:63 + FIRST + <|set|>1:62-3:64 + SECOND + "}, + expected: indoc! {" + FIRST + d + "}, + }, + Case { + name: "insert_before_first_and_after_line", + original: indoc! {" + a + b + "}, + model_output: indoc! {" + <|insert|> + HEAD + <|insert|>0:61 + MID + "}, + expected: indoc! {" + HEAD + a + MID + b + "}, + }, + ]; + + for case in &cases { + let result = hashline::apply_edit_commands(case.original, &case.model_output); + assert_eq!(result, case.expected, "failed case: {}", case.name); + } } - prompt.push_str("<|fim_middle|>"); - } -} + #[test] + fn test_output_has_edit_commands() { + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + SET_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + INSERT_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "some text\n{}1:cd\nstuff", + SET_COMMAND_MARKER + ))); + assert!(!hashline::output_has_edit_commands("just plain text")); + assert!(!hashline::output_has_edit_commands("NO_EDITS")); + } -pub mod v0211_prefill { - use super::*; + // ---- hashline::patch_to_edit_commands round-trip tests ---- - pub fn get_prefill(context: &str, editable_range: &Range) -> String { - let editable_region = &context[editable_range.start..editable_range.end]; + #[test] + fn test_patch_to_edit_commands() { + struct Case { + name: &'static str, + old: &'static str, + patch: &'static str, + expected_new: &'static str, + } - let prefill_len = (editable_region.len() as f64 * PREFILL_RATIO) as usize; - let prefill_len = editable_region.floor_char_boundary(prefill_len); + let cases = [ + Case { + name: "single_line_replacement", + old: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + patch: indoc! 
{" + @@ -1,5 +1,5 @@ + let mut total = 0; + for product in products { + - total += ; + + total += product.price; + } + total + "}, + expected_new: indoc! {" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "multiline_replacement", + old: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + patch: indoc! {" + @@ -1,5 +1,3 @@ + fn foo() { + - let x = 1; + - let y = 2; + - let z = 3; + + let sum = 1 + 2 + 3; + } + "}, + expected_new: indoc! {" + fn foo() { + let sum = 1 + 2 + 3; + } + "}, + }, + Case { + name: "insertion", + old: indoc! {" + fn main() { + let x = 1; + } + "}, + patch: indoc! {" + @@ -1,3 +1,4 @@ + fn main() { + let x = 1; + + let y = 2; + } + "}, + expected_new: indoc! {" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insertion_before_first", + old: indoc! {" + let x = 1; + let y = 2; + "}, + patch: indoc! {" + @@ -1,2 +1,3 @@ + +use std::io; + let x = 1; + let y = 2; + "}, + expected_new: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "deletion", + old: indoc! {" + aaa + bbb + ccc + ddd + "}, + patch: indoc! {" + @@ -1,4 +1,2 @@ + aaa + -bbb + -ccc + ddd + "}, + expected_new: indoc! {" + aaa + ddd + "}, + }, + Case { + name: "multiple_changes", + old: indoc! {" + alpha + beta + gamma + delta + epsilon + "}, + patch: indoc! {" + @@ -1,5 +1,5 @@ + -alpha + +ALPHA + beta + gamma + -delta + +DELTA + epsilon + "}, + expected_new: indoc! {" + ALPHA + beta + gamma + DELTA + epsilon + "}, + }, + Case { + name: "replace_with_insertion", + old: indoc! {r#" + fn handle() { + modal_state.close(); + modal_state.dismiss(); + "#}, + patch: indoc! {r#" + @@ -1,3 +1,4 @@ + fn handle() { + modal_state.close(); + + eprintln!(""); + modal_state.dismiss(); + "#}, + expected_new: indoc! 
{r#" + fn handle() { + modal_state.close(); + eprintln!(""); + modal_state.dismiss(); + "#}, + }, + Case { + name: "complete_replacement", + old: indoc! {" + aaa + bbb + ccc + "}, + patch: indoc! {" + @@ -1,3 +1,3 @@ + -aaa + -bbb + -ccc + +xxx + +yyy + +zzz + "}, + expected_new: indoc! {" + xxx + yyy + zzz + "}, + }, + Case { + name: "add_function_body", + old: indoc! {" + fn foo() { + modal_state.dismiss(); + } - // Find a token boundary to avoid splitting tokens in the prefill. - // In Qwen2.5-Coder, \n is always the END of a token (e.g. `;\n`, - // ` {\n`), and \n\n / \n\n\n are single tokens, so we must include - // the \n and consume any consecutive \n characters after it. - let prefill = &editable_region[..prefill_len]; - match prefill.rfind('\n') { - Some(pos) => { - let mut end = pos + 1; - while end < editable_region.len() - && editable_region.as_bytes().get(end) == Some(&b'\n') - { - end += 1; - } - editable_region[..end].to_string() + fn + + fn handle_keystroke() { + "}, + patch: indoc! {" + @@ -1,6 +1,8 @@ + fn foo() { + modal_state.dismiss(); + } + + -fn + +fn handle_submit() { + + todo() + +} + + fn handle_keystroke() { + "}, + expected_new: indoc! {" + fn foo() { + modal_state.dismiss(); + } + + fn handle_submit() { + todo() + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "with_cursor_offset", + old: indoc! {r#" + fn main() { + println!(); + } + "#}, + patch: indoc! {r#" + @@ -1,3 +1,3 @@ + fn main() { + - println!(); + + eprintln!(""); + } + "#}, + expected_new: indoc! {r#" + fn main() { + eprintln!("<|user_cursor|>"); + } + "#}, + }, + Case { + name: "non_local_hunk_header_pure_insertion_repro", + old: indoc! {" + aaa + bbb + "}, + patch: indoc! {" + @@ -20,2 +20,3 @@ + aaa + +xxx + bbb + "}, + expected_new: indoc! {" + aaa + xxx + bbb + "}, + }, + ]; + + for case in &cases { + // The cursor_offset for patch_to_edit_commands is relative to + // the first hunk's new text (context + additions). 
We compute + // it by finding where the marker sits in the expected output + // (which mirrors the new text of the hunk). + let cursor_offset = case.expected_new.find(CURSOR_MARKER); + + let commands = + hashline::patch_to_edit_commands(case.old, case.patch, cursor_offset) + .unwrap_or_else(|e| panic!("failed case {}: {e}", case.name)); + + assert!( + hashline::output_has_edit_commands(&commands), + "case {}: expected edit commands, got: {commands:?}", + case.name, + ); + + let applied = hashline::apply_edit_commands(case.old, &commands); + assert_eq!(applied, case.expected_new, "case {}", case.name); } - // No newline found. Fall back to splitting before the last space - // (word-level boundary) - None => match prefill.rfind(' ') { - Some(pos) => prefill[..pos].to_string(), - None => prefill.to_string(), - }, } } } @@ -800,6 +2399,8 @@ pub mod seed_coder { pub const SEPARATOR: &str = "=======\n"; pub const END_MARKER: &str = ">>>>>>> UPDATED\n"; + pub const NO_EDITS: &str = "NO_EDITS\n"; + pub fn special_tokens() -> &'static [&'static str] { &[ FIM_SUFFIX, @@ -813,6 +2414,17 @@ pub mod seed_coder { ] } + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let section = build_cursor_prefix_section(path, context, editable_range, cursor_offset); + prompt.push_str(§ion); + } + pub fn format_prompt_with_budget( path: &Path, context: &str, @@ -842,6 +2454,7 @@ pub mod seed_coder { let related_files_section = super::format_related_files_within_budget( related_files, FILE_MARKER, + "", budget_after_edit_history, ); @@ -892,6 +2505,17 @@ pub mod seed_coder { section.push_str(SEPARATOR); section } + + /// Format patch as containing no changes if it's empty; otherwise return None. 
+ pub(crate) fn no_edits(patch: &str) -> Option { + // Count lines in the patch + let empty_patch = patch.lines().count() <= 3; + if empty_patch { + Some(format!("{NO_EDITS}{END_MARKER}")) + } else { + None + } + } } /// The zeta1 prompt format @@ -1104,18 +2728,27 @@ mod tests { events: Vec, related_files: Vec, ) -> ZetaPromptInput { + let context_range = 0..cursor_excerpt.len(); ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: cursor_excerpt.into(), - editable_range_in_excerpt: editable_range, cursor_offset_in_excerpt: cursor_offset, excerpt_start_row: None, events: events.into_iter().map(Arc::new).collect(), related_files, - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range, + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range, + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, } } @@ -1136,13 +2769,14 @@ mod tests { excerpts: vec![RelatedExcerpt { row_range: 0..content.lines().count() as u32, text: content.into(), + order: 0, }], in_open_source_repo: false, } } fn format_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0114180EditableRegion, max_tokens) + format_prompt_with_budget_for_format(input, ZetaFormat::V0114180EditableRegion, max_tokens) } #[test] @@ -1244,14 +2878,17 @@ mod tests { RelatedExcerpt { row_range: 0..10, text: "first excerpt\n".into(), + order: 0, }, RelatedExcerpt { row_range: 10..20, text: "second excerpt\n".into(), + order: 0, }, RelatedExcerpt { row_range: 20..30, text: "third excerpt\n".into(), + order: 0, }, ], }], @@ -1291,6 +2928,149 @@ mod tests { ); } + #[test] + fn test_truncation_prioritizes_lower_order_excerpts() { + // Two files: 
file_a has a high-order excerpt, file_b has a low-order one. + // With tight budget, only the lower-order excerpt from file_b should be included. + let input = make_input( + "x", + 0..1, + 0, + vec![], + vec![ + RelatedFile { + path: Path::new("file_a.rs").into(), + max_row: 10, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..10, + text: "low priority content\n".into(), + order: 5, + }], + }, + RelatedFile { + path: Path::new("file_b.rs").into(), + max_row: 10, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..10, + text: "high priority content\n".into(), + order: 1, + }], + }, + ], + ); + + // With large budget, both files included; rendered in stable lexicographic order. + assert_eq!( + format_with_budget(&input, 10000), + indoc! {r#" + <|file_sep|>file_a.rs + low priority content + <|file_sep|>file_b.rs + high priority content + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + + // With tight budget, only file_b (lower order) fits. + // Cursor section is ~37 tokens, so budget 52 leaves ~15 for related files. + // file_b header (7) + excerpt (7) = 14 tokens, which fits. + // file_a would need another 14 tokens, which doesn't fit. + assert_eq!( + format_with_budget(&input, 52), + indoc! {r#" + <|file_sep|>file_b.rs + high priority content + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + } + + #[test] + fn test_truncation_drops_high_order_excerpts_within_file() { + // A single file has excerpts at order 1 and order 3. With a tight budget, + // only the order-1 excerpts are included while the order-3 excerpt is + // dropped — even though they belong to the same file. This also preserves + // the parent invariant: parent outline items have order ≤ their best + // child, so they're always included when any child is. 
+ let input = make_input( + "x", + 0..1, + 0, + vec![], + vec![RelatedFile { + path: Path::new("mod.rs").into(), + max_row: 30, + in_open_source_repo: false, + excerpts: vec![ + RelatedExcerpt { + row_range: 0..5, + text: "mod header\n".into(), + order: 1, + }, + RelatedExcerpt { + row_range: 5..15, + text: "important fn\n".into(), + order: 1, + }, + RelatedExcerpt { + row_range: 15..30, + text: "less important fn\n".into(), + order: 3, + }, + ], + }], + ); + + // With large budget, all three excerpts included. + assert_eq!( + format_with_budget(&input, 10000), + indoc! {r#" + <|file_sep|>mod.rs + mod header + ... + important fn + ... + less important fn + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + + // With tight budget, only order<=1 excerpts included (header + important fn). + assert_eq!( + format_with_budget(&input, 55), + indoc! {r#" + <|file_sep|>mod.rs + mod header + ... + important fn + ... 
+ <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + } + #[test] fn test_truncation_drops_older_events_first() { let input = make_input( @@ -1361,11 +3141,11 @@ mod tests { } fn format_seed_coder(input: &ZetaPromptInput) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, 10000) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, 10000) } fn format_seed_coder_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, max_tokens) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, max_tokens) } #[test] @@ -1463,6 +3243,72 @@ mod tests { ); } + #[test] + fn test_seed_coder_truncation_prioritizes_lower_order() { + let input = make_input( + "code", + 0..4, + 2, + vec![], + vec![ + RelatedFile { + path: Path::new("low_prio.rs").into(), + max_row: 5, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..5, + text: "low prio\n".into(), + order: 10, + }], + }, + RelatedFile { + path: Path::new("high_prio.rs").into(), + max_row: 5, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..5, + text: "high prio\n".into(), + order: 1, + }], + }, + ], + ); + + // With large budget, both included; rendered in stable lexicographic order. + assert_eq!( + format_seed_coder(&input), + indoc! {r#" + <[fim-suffix]> + <[fim-prefix]>low_prio.rs + low prio + high_prio.rs + high prio + + test.rs + <<<<<<< CURRENT + co<|user_cursor|>de + ======= + <[fim-middle]>"#} + ); + + // With tight budget, only high_prio included. + // Cursor sections cost 25 tokens, so budget 44 leaves 19 for related files. + // high_prio header (7) + excerpt (3) = 10, fits. low_prio would add 10 more = 20 > 19. + assert_eq!( + format_seed_coder_with_budget(&input, 44), + indoc! 
{r#" + <[fim-suffix]> + <[fim-prefix]>high_prio.rs + high prio + + test.rs + <<<<<<< CURRENT + co<|user_cursor|>de + ======= + <[fim-middle]>"#} + ); + } + #[test] fn test_seed_coder_clean_output() { let output_with_marker = "new code\n>>>>>>> UPDATED\n"; @@ -1484,15 +3330,23 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 15..41, cursor_offset_in_excerpt: 30, excerpt_start_row: Some(0), events: vec![Arc::new(make_event("other.rs", "-old\n+new\n"))], related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 15..41, + editable_180: 15..41, + editable_350: 15..41, + editable_150_context_350: 0..excerpt.len(), + editable_180_context_350: 0..excerpt.len(), + editable_350_context_150: 0..excerpt.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, 15..41, 0..excerpt.len()); @@ -1539,15 +3393,23 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 0..28, cursor_offset_in_excerpt: 15, excerpt_start_row: Some(10), events: vec![], related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 0..28, + editable_180: 0..28, + editable_350: 0..28, + editable_150_context_350: 0..28, + editable_180_context_350: 0..28, + editable_350_context_150: 0..28, + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, 0..28, 0..28); @@ -1589,15 +3451,23 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: editable_range.clone(), 
cursor_offset_in_excerpt: 25, excerpt_start_row: Some(0), events: vec![], related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range.clone(), + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range.clone(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, + repo_url: None, }; let prompt = zeta1::format_zeta1_from_input(&input, editable_range, context_range); diff --git a/crates/zlog/src/filter.rs b/crates/zlog/src/filter.rs index c6e51fa40340b4aad7efb017c961ce8891ab776e..710ddf761eb6eb1d0c164522903a9525d12de2a4 100644 --- a/crates/zlog/src/filter.rs +++ b/crates/zlog/src/filter.rs @@ -38,8 +38,6 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[ #[cfg(any(target_os = "linux", target_os = "freebsd"))] ("zbus", log::LevelFilter::Warn), #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))] - ("wgpu", log::LevelFilter::Warn), - #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))] ("naga::back::spv::writer", log::LevelFilter::Warn), // usvg prints a lot of warnings on rendering an SVG with partial errors, which // can happen a lot with the SVG preview diff --git a/crates/zlog/src/sink.rs b/crates/zlog/src/sink.rs index 07e87be1b071f2538e716bb8fd2b692527363fc4..2aea9c957756011689d81618eedcf22979ea2077 100644 --- a/crates/zlog/src/sink.rs +++ b/crates/zlog/src/sink.rs @@ -56,10 +56,9 @@ pub fn init_output_file( path: &'static PathBuf, path_rotate: Option<&'static PathBuf>, ) -> io::Result<()> { - let mut file = std::fs::OpenOptions::new() - .create(true) - .append(true) - .open(path)?; + let mut enabled_sinks_file = ENABLED_SINKS_FILE + .try_lock() + .expect("Log file lock is available during init"); SINK_FILE_PATH 
.set(path) @@ -70,22 +69,30 @@ pub fn init_output_file( .expect("Init file output should only be called once"); } - let mut enabled_sinks_file = ENABLED_SINKS_FILE - .try_lock() - .expect("Log file lock is available during init"); - - let size_bytes = file.metadata().map_or(0, |metadata| metadata.len()); - if size_bytes >= SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES); - } else { - SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release); - } - + let file = open_or_create_log_file(path, path_rotate, SINK_FILE_SIZE_BYTES_MAX)?; + SINK_FILE_SIZE_BYTES.store(file.metadata().map_or(0, |m| m.len()), Ordering::Release); *enabled_sinks_file = Some(file); Ok(()) } +fn open_or_create_log_file( + path: &PathBuf, + path_rotate: Option<&PathBuf>, + sink_file_size_bytes_max: u64, +) -> Result { + let size_bytes = std::fs::metadata(path).map(|metadata| metadata.len()); + match size_bytes { + Ok(size_bytes) if size_bytes >= sink_file_size_bytes_max => { + rotate_log_file(Some(path), path_rotate).map(|it| it.unwrap()) + } + _ => std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(path), + } +} + const LEVEL_OUTPUT_STRINGS: [&str; 6] = [ " ", // nop: ERROR = 1 "ERROR", // @@ -144,11 +151,11 @@ pub fn submit(mut record: Record) { record.message ); } - let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { + let mut file_guard = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { ENABLED_SINKS_FILE.clear_poison(); handle.into_inner() }); - if let Some(file) = file.as_mut() { + if let Some(file) = file_guard.as_mut() { struct SizedWriter<'a> { file: &'a mut std::fs::File, written: u64, @@ -182,12 +189,16 @@ pub fn submit(mut record: Record) { SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written }; if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file( - file, - SINK_FILE_PATH.get(), - SINK_FILE_PATH_ROTATE.get(), - &SINK_FILE_SIZE_BYTES, - ); + *file_guard = 
None; + let file = rotate_log_file(SINK_FILE_PATH.get(), SINK_FILE_PATH_ROTATE.get()); + match file { + Ok(Some(file)) => *file_guard = Some(file), + Ok(None) => {} + Err(e) => { + eprintln!("Failed to open log file: {e}") + } + } + SINK_FILE_SIZE_BYTES.store(0, Ordering::Release); } } } @@ -247,19 +258,13 @@ impl std::fmt::Display for SourceFmt<'_> { } fn rotate_log_file( - file: &mut fs::File, path: Option, path_rotate: Option, - atomic_size: &AtomicU64, -) where +) -> std::io::Result> +where PathRef: AsRef, { - if let Err(err) = file.flush() { - eprintln!( - "Failed to flush log file before rotating, some logs may be lost: {}", - err - ); - } + let path = path.as_ref().map(PathRef::as_ref); let rotation_error = match (path, path_rotate) { (Some(_), None) => Some(anyhow::anyhow!("No rotation log file path configured")), (None, _) => Some(anyhow::anyhow!("No log file path configured")), @@ -270,46 +275,53 @@ fn rotate_log_file( if let Some(err) = rotation_error { eprintln!("Log file rotation failed. 
Truncating log file anyways: {err}",); } - _ = file.set_len(0); - - // SAFETY: It is safe to set size to 0 even if set_len fails as - // according to the documentation, it only fails if: - // - the file is not writeable: should never happen, - // - the size would cause an overflow (implementation specific): 0 should never cause an overflow - atomic_size.store(0, Ordering::Release); + path.map(|path| { + fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(path) + }) + .transpose() } #[cfg(test)] mod tests { + use super::*; #[test] - fn test_rotate_log_file() { + fn test_open_or_create_log_file_rotate() { let temp_dir = tempfile::tempdir().unwrap(); let log_file_path = temp_dir.path().join("log.txt"); let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); - let mut file = fs::File::create(&log_file_path).unwrap(); let contents = String::from("Hello, world!"); - file.write_all(contents.as_bytes()).unwrap(); + std::fs::write(&log_file_path, &contents).unwrap(); - let size = AtomicU64::new(contents.len() as u64); - - rotate_log_file( - &mut file, - Some(&log_file_path), - Some(&rotation_log_file_path), - &size, - ); + open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), 4).unwrap(); assert!(log_file_path.exists()); assert_eq!(log_file_path.metadata().unwrap().len(), 0); assert!(rotation_log_file_path.exists()); - assert_eq!( - std::fs::read_to_string(&rotation_log_file_path).unwrap(), - contents, - ); - assert_eq!(size.load(Ordering::Acquire), 0); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), ""); + } + + #[test] + fn test_open_or_create_log_file() { + let temp_dir = tempfile::tempdir().unwrap(); + let log_file_path = temp_dir.path().join("log.txt"); + let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); + + let contents = String::from("Hello, world!"); + std::fs::write(&log_file_path, &contents).unwrap(); + + open_or_create_log_file(&log_file_path, 
Some(&rotation_log_file_path), !0).unwrap(); + + assert!(log_file_path.exists()); + assert_eq!(log_file_path.metadata().unwrap().len(), 13); + assert!(!rotation_log_file_path.exists()); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), contents); } /// Regression test, ensuring that if log level values change we are made aware diff --git a/docs/.doc-examples/complex-feature.md b/docs/.doc-examples/complex-feature.md index 6a859ce5041f0e39834cc5f47f5b18248a15295e..745e6b3bcdc97dc35092bb651903f37435acc1ef 100644 --- a/docs/.doc-examples/complex-feature.md +++ b/docs/.doc-examples/complex-feature.md @@ -91,11 +91,11 @@ To disable word diff for specific languages only, add this to your settings.json File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit. -To open File History: +To view File History: -- Right-click on a file in the Project Panel and select "Open File History" -- Right-click on a file in the Git Panel and select "Open File History" -- Right-click on an editor tab and select "Open File History" +- Right-click on a file in the Project Panel and select "View File History" +- Right-click on a file in the Git Panel and select "View File History" +- Right-click on an editor tab and select "View File History" - Use the Command Palette and search for "file history" ## Fetch, Push, and Pull {#fetch-push-pull} diff --git a/docs/.prettierignore b/docs/.prettierignore index a52439689a83a1c2e834918c39441186b47120e5..c742ed4b6859f32219cecbac9f722db8a6929710 100644 --- a/docs/.prettierignore +++ b/docs/.prettierignore @@ -1,2 +1,5 @@ # Handlebars partials are not supported by Prettier. 
*.hbs + +# Automatically generated +theme/c15t@*.js diff --git a/docs/README.md b/docs/README.md index e1649f4bc99e1668352a46ee2071dcfe1775f4a7..a0f9bbd5c628f41d291880239ca555ea7ec0e3ea 100644 --- a/docs/README.md +++ b/docs/README.md @@ -64,6 +64,22 @@ This will render a human-readable version of the action name, e.g., "zed: open s Templates are functions that modify the source of the docs pages (usually with a regex match and replace). You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates. +## Consent Banner + +We pre-bundle the `c15t` package because the docs pipeline does not include a JS bundler. If you need to update `c15t` and rebuild the bundle, use: + +``` +mkdir c15t-bundle && cd c15t-bundle +npm init -y +npm install c15t@ esbuild +echo "import { getOrCreateConsentRuntime } from 'c15t'; window.c15t = { getOrCreateConsentRuntime };" > entry.js +npx esbuild entry.js --bundle --format=iife --minify --outfile=c15t@.js +cp c15t@.js ../theme/c15t@.js +cd .. && rm -rf c15t-bundle +``` + +Replace `` with the new version of `c15t` you are installing. Then update `book.toml` to reference the new bundle filename. 
+ ### References - Template Trait: `crates/docs_preprocessor/src/templates.rs` diff --git a/docs/book.toml b/docs/book.toml index 86fa447f581fba88ff7df53bb51e08440585a9dc..3269003a1d37ede19ec18b62809a928a08764d2f 100644 --- a/docs/book.toml +++ b/docs/book.toml @@ -23,8 +23,8 @@ default-description = "Learn how to use and customize Zed, the fast, collaborati default-title = "Zed Code Editor Documentation" no-section-label = true preferred-dark-theme = "dark" -additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css"] -additional-js = ["theme/page-toc.js", "theme/plugins.js"] +additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css", "theme/consent-banner.css"] +additional-js = ["theme/page-toc.js", "theme/plugins.js", "theme/c15t@2.0.0-rc.3.js", "theme/analytics.js"] [output.zed-html.print] enable = false diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index b62512d469bee4c1e776f29fddc35bcc4d979467..7e183d38550d3624a0c9a48051e95ca4c568d72d 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -114,8 +114,6 @@ The agent can search your codebase to find relevant context, but providing it ex Add context by typing `@` in the message editor. You can mention files, directories, symbols, previous threads, rules files, and diagnostics. -Copying images and pasting them in the panel's message editor is also supported. - When you paste multi-line code selections copied from a buffer, Zed automatically formats them as @-mentions with the file context. To paste content without this automatic formatting, use {#kb agent::PasteRaw} to paste raw text directly. 
@@ -123,6 +121,14 @@ To paste content without this automatic formatting, use {#kb agent::PasteRaw} to Additionally, you can also select text in a buffer or terminal and add it as context by using the {#kb agent::AddSelectionToThread} keybinding, running the {#action agent::AddSelectionToThread} action, or choosing the "Selection" item in the `+` menu in the message editor. +### Images as Context + +It's also possible to attach images in your prompt for providers that support vision models. +OpenAI GPT-4o and later, Anthropic Claude 3 and later, Google Gemini 1.5 and 2.0, and Bedrock vision models (Claude 3+, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision, Mistral Pixtral) all support image inputs. + +To add an image, you can either search in your project's directory by @-mentioning it, or drag it from your file system directly into the agent panel message editor. +Copying an image and pasting it is also supported. + ## Token Usage {#token-usage} Zed surfaces how many tokens you are consuming for your currently active thread near the profile selector in the panel's message editor. @@ -168,7 +174,7 @@ You can explore the exact tools enabled in each profile by clicking on the profi Alternatively, you can also use either the command palette, by running {#action agent::ManageProfiles}, or the keybinding directly, {#kb agent::ManageProfiles}, to have access to the profile management modal. -Use {#kb agent::CycleModeSelector} to switch between profiles without opening the modal. +Use {#kb agent::CycleModeSelector} to cycle through available profiles without opening the modal. 
#### Custom Profiles {#custom-profiles} diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index af02bd5f2072ee8e709c65d6237168c6d2159e70..0547f19c9ca0e58cb5d63d7ae1c5231d091a6503 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -290,10 +290,10 @@ See the [Tool Permissions](./tool-permissions.md) documentation for more example > **Note:** Before Zed v0.224.0, tool approval was controlled by the `agent.always_allow_tool_actions` boolean (default `false`). Set it to `true` to auto-approve tool actions, or leave it `false` to require confirmation for edits and tool calls. -### Single-file Review +### Edit Display Mode Control whether to display review actions (accept & reject) in single buffers after the agent is done performing edits. -The default value is `true`. +The default value is `false`. ```json [settings] { @@ -303,8 +303,6 @@ The default value is `true`. } ``` -When set to `false`, these controls are only available in the multibuffer review tab. - ### Sound Notification Control whether to hear a notification sound when the agent is done generating changes or needs your input. diff --git a/docs/src/ai/ai-improvement.md b/docs/src/ai/ai-improvement.md index 94085058f237b942f29d43f8d82b2f0afa97a782..26085bc3971eca633fa481469e26719161fbf7e0 100644 --- a/docs/src/ai/ai-improvement.md +++ b/docs/src/ai/ai-improvement.md @@ -3,73 +3,99 @@ title: AI Improvement and Data Collection - Zed description: Zed's opt-in approach to AI data collection for improving the agent panel and edit predictions. 
--- -# Zed AI Improvement +# Zed AI Features and Privacy -## Agent Panel +## Overview -### Opt-In +AI features in Zed include: -When you use the Agent Panel through any of these means: +- [Agent Panel](./agent-panel.md) +- [Edit Predictions](./edit-prediction.md) +- [Inline Assist](./inline-assistant.md) +- [Text Threads](./text-threads.md) +- Auto Git Commit Message Generation -- [Zed's hosted models](./subscription.md) -- [connecting a non-Zed AI service via API key](./llm-providers.md) -- using an [external agent](./external-agents.md) +By default, Zed does not store your prompts or code context. This data is sent to your selected AI provider (e.g., Anthropic, OpenAI, Google, or xAI) to generate responses, then discarded. Zed will not use your data to evaluate or improve AI features unless you explicitly share it (see [AI Feedback with Ratings](#ai-feedback-with-ratings)) or you opt in to edit prediction training data collection (see [Edit Predictions](#edit-predictions)). + +Zed is model-agnostic by design, and none of this changes based on which provider you choose. You can use your own API keys or Zed's hosted models without any data being retained. + +### Data Retention and Training -Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. +Zed's Agent Panel can be used via: -> Note that rating responses will send your data related to that response to Zed's servers. -> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our Agentic offering without you explicitly rating responses. 
+- [Zed's hosted models](./subscription.md) +- [connecting a non-Zed AI service via API key](./llm-providers.md) +- using an [external agent](./external-agents.md) via ACP -When using upstream services through Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. +When using Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. | Provider | No Training Guarantee | Zero-Data Retention (ZDR) | | --------- | ------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | | Anthropic | [Yes](https://www.anthropic.com/legal/commercial-terms) | [Yes](https://privacy.anthropic.com/en/articles/8956058-i-have-a-zero-data-retention-agreement-with-anthropic-what-products-does-it-apply-to) | | Google | [Yes](https://cloud.google.com/terms/service-terms) | [Yes](https://cloud.google.com/terms/service-terms), see Service Terms sections 17 and 19h | | OpenAI | [Yes](https://openai.com/enterprise-privacy/) | [Yes](https://platform.openai.com/docs/guides/your-data) | +| xAI | [Yes](https://x.ai/legal/faq-enterprise) | [Yes](https://x.ai/legal/faq-enterprise) | When you use your own API keys or external agents, **Zed does not have control over how your data is used by that service provider.** You should reference your agreement with each service provider to understand what terms and conditions apply. -### Data we collect +### AI Feedback with Ratings + +You can provide feedback on Zed's AI features by rating specific AI responses in Zed and sharing details related to those conversations with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. + +> **Rating = Data Sharing:** When you rate a response, your entire conversation thread is sent to Zed. 
This includes messages, AI responses, and thread metadata. +> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our AI features without you explicitly rating responses. -For prompts you have explicitly shared with us, Zed may store copies of those prompts and other data about the specific use of the Agent Panel. +### Data Collected (AI Feedback) -This data includes: +For conversations you have explicitly shared with us via rating, Zed may store: -- The prompt given to the Agent -- Any commentary you include -- Product telemetry about the agentic thread +- All messages in the thread (your prompts and AI responses) +- Any commentary you include with your rating +- Thread metadata (model used, token counts, timestamps) - Metadata about your Zed installation -### Data Handling +If you do not rate responses, Zed will not store Customer Data (code, conversations, responses) related to your usage of the AI features. + +Telemetry related to Zed's AI features is collected. This includes metadata such as the AI feature being used and high-level interactions with the feature to understand performance (e.g., Agent response time, edit acceptance/rejection in the Agent panel or edit completions). You can read more in Zed's [telemetry](../telemetry.md) documentation. Collected data is stored in Snowflake, a private database. We periodically review this data to refine the agent's system prompt and tool use. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). ## Edit Predictions -By default, when using Zed Edit Predictions, Zed does not persistently store user content or use user content for training of its models. +Edit predictions can be powered by **Zed's Zeta model** or by **third-party providers** like GitHub Copilot. 
+ +### Zed's Zeta Model (Default) + +Zed sends a limited context window to the model to generate predictions: + +- A code excerpt around your cursor (not the full file) +- Recent edits as diffs +- Relevant excerpts from related open files -### Opt-in +This data is processed transiently to generate predictions and is not retained afterward. -Users who are working on open source licensed projects may optionally opt-in to providing model improvement feedback. This opt-in occurs on a per-project basis. If you work on multiple open source projects and wish to provide model improvement feedback you will have to opt-in for each individual project. +### Third-Party Providers -When working on other projects where you haven't opted-in, Zed will not persistently store user content or use user content for training of its models. +When using third-party providers like GitHub Copilot, **Zed does not control how your data is handled** by that provider. You should consult their Terms and Conditions directly. -You can see exactly how Zed detects open source licenses in: [license_detection.rs](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs). +Note: Zed's `disabled_globs` settings will prevent predictions from being requested, but third-party providers may receive file content when files are opened. -### Exclusions +### Training Data: Opt-In for Open Source Projects -Zed will intentionally exclude certain files from Predictive Edits entirely, even when you have opted-in to model improvement feedback. +Zed does not collect training data for our edit prediction model unless the following conditions are met: -You can inspect this exclusion list by opening `zed: open default settings` from the command palette: +1. **You opt in** – Toggle "Training Data Collection" under the **Privacy** section of the edit prediction status bar menu (click the edit prediction icon in the status bar). +2. 
**The project is open source** — detected via LICENSE file ([see detection logic](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs)) +3. **The file isn't excluded** — via `disabled_globs` + +### File Exclusions + +Certain files are always excluded from edit predictions—regardless of opt-in status: ```json [settings] { "edit_predictions": { - // A list of globs representing files that edit predictions should be disabled for. - // There's a sensible default list of globs already included. - // Any addition to this list will be merged with the default list. "disabled_globs": [ "**/.env*", "**/*.pem", @@ -92,22 +118,17 @@ Users may explicitly exclude additional paths and/or file extensions by adding t } ``` -### Data we collect - -For open source projects where you have opted-in, Zed may store copies of requests and responses to the Zed AI Prediction service. - -This data includes: +### Data Collected (Edit Prediction Training Data) -- sampled edit prediction examples (cursor context + recent diffs/edits) for offline evaluation -- the edit prediction -- a portion of the buffer content around the cursor -- a few recent edits -- the current buffer outline -- diagnostics (errors, warnings, etc) from language servers +For open source projects where you've opted in, Zed may collect: -### Data Handling +- Code excerpt around your cursor +- Recent edit diffs +- The generated prediction +- Repository URL and git revision +- Buffer outline and diagnostics -Collected data is stored in Snowflake, a private database. We periodically select training samples from this data. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). The training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). +Collected data is stored in Snowflake. 
We periodically review this data to select training samples for inclusion in our model training dataset. We ensure any included data is anonymized and contains no sensitive information (access tokens, user IDs, email addresses, etc). This training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). ### Model Output @@ -115,4 +136,4 @@ We then use this training dataset to fine-tune [Qwen2.5-Coder-7B](https://huggin ## Applicable terms -Please see the [Zed Terms of Service](https://zed.dev/terms-of-service) for more. +Please see the [Zed Terms of Service](https://zed.dev/terms) for more. diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 56b1c1260ec98bc82de337ac492b1b4aa40723d8..92fde3eddd3be0a2dbfb1b6d37065b58cf2ad411 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -301,8 +301,6 @@ Edit Prediction also works with other providers. ### GitHub Copilot {#github-copilot} -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - To use GitHub Copilot as your provider, set this in your settings file ([how to edit](../configuring-zed.md#settings-files)): ```json [settings] @@ -408,13 +406,47 @@ After adding your API key, Codestral will appear in the provider dropdown in the ### Self-Hosted OpenAI-compatible servers -To configure Zed to use an arbitrary server for edit predictions: +You can use any self-hosted server that implements the OpenAI completion API format. This works with vLLM, llama.cpp server, LocalAI, and other compatible servers. -1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows) -2. Search for "Edit Predictions" and click **Configure Providers** -3. Find the "OpenAI-compatible API" section and enter the URL and model name. You can also select a prompt format that Zed should use. Zed currently supports several FIM prompt formats, as well as Zed's own Zeta prompt format. 
If you do not select a prompt format, Zed will attempt to infer it from the model name. +#### Configuration + +Set `open_ai_compatible_api` as your provider and configure the API endpoint: + +```json [settings] +{ + "edit_predictions": { + "provider": "open_ai_compatible_api", + "open_ai_compatible_api": { + "api_url": "http://localhost:8080/v1/completions", + "model": "deepseek-coder-6.7b-base", + "prompt_format": "deepseek_coder", + "max_output_tokens": 64 + } + } +} +``` + +The `prompt_format` setting controls how code context is formatted for the model. Use `"infer"` to detect the format from the model name, or specify one explicitly: + +- `code_llama` - CodeLlama format: `
 prefix  suffix `
+- `star_coder` - StarCoder format: `<fim_prefix>prefix<fim_suffix>suffix<fim_middle>`
+- `deepseek_coder` - DeepSeek format with special unicode markers
+- `qwen` - Qwen/CodeGemma format: `<|fim_prefix|>prefix<|fim_suffix|>suffix<|fim_middle|>`
+- `codestral` - Codestral format: `[SUFFIX]suffix[PREFIX]prefix`
+- `glm` - GLM-4 format with code markers
+- `infer` - Auto-detect from model name (default)
 
-The URL must accept requests according to OpenAI's [Completions API](https://developers.openai.com/api/reference/resources/completions/methods/create)
+Your server must implement the OpenAI `/v1/completions` endpoint. Edit predictions will send POST requests with this format:
+
+```json
+{
+  "model": "your-model-name",
+  "prompt": "formatted-code-context",
+  "max_tokens": 256,
+  "temperature": 0.2,
+  "stop": ["<|endoftext|>", ...]
+}
+```
 
 ## See also
 
diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md
index 74afac9604988afe952f4735ed0f81fb994b57c7..7a76e795f127651201a6483986ebbc917088bf96 100644
--- a/docs/src/ai/external-agents.md
+++ b/docs/src/ai/external-agents.md
@@ -27,7 +27,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-g": ["agent::NewExternalAgentThread", { "agent": "gemini" }]
+      "cmd-alt-g": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "gemini" } } }
+      ]
     }
   }
 ]
@@ -38,32 +41,14 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 The first time you create a Gemini CLI thread, Zed will install [@google/gemini-cli](https://github.com/google-gemini/gemini-cli).
 This installation is only available to Zed and is kept up to date as you use the agent.
 
-By default, Zed will use this managed version of Gemini CLI even if you have it installed globally.
-However, you can configure it to use a version in your `PATH` by adding this to your settings:
-
-```json [settings]
-{
-  "agent_servers": {
-    "gemini": {
-      "ignore_system_version": false
-    }
-  }
-}
-```
-
 #### Authentication
 
-After you have Gemini CLI running, you'll be prompted to choose your authentication method.
+After you have Gemini CLI running, you'll be prompted to authenticate.
 
-Most users should click the "Log in with Google".
-This will cause a browser window to pop-up and auth directly with Gemini CLI.
+Click the "Login" button to open the Gemini CLI interactively, where you can log in with your Google account or [Vertex AI](https://cloud.google.com/vertex-ai) credentials.
 Zed does not see your OAuth or access tokens in this case.
 
-You can also use the "Gemini API Key".
-If you select this, and have the `GEMINI_API_KEY` set, then we will use that.
-Otherwise Zed will prompt you for an API key which will be stored securely in your keychain, and used to start Gemini CLI from within Zed.
-
-The "Vertex AI" option is for those who are using [Vertex AI](https://cloud.google.com/vertex-ai), and have already configured their environment correctly.
+If the `GEMINI_API_KEY` environment variable (or `GOOGLE_AI_API_KEY`) is already set, or you have configured a Google AI API key in Zed's [language model provider settings](./llm-providers.md#google-ai), it will be passed to Gemini CLI automatically.
 
 For more information, see the [Gemini CLI docs](https://github.com/google-gemini/gemini-cli/blob/main/docs/index.md).
 
@@ -88,7 +73,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "claude_code" }]
+      "cmd-alt-c": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "claude-acp" } } }
+      ]
     }
   }
 ]
@@ -114,7 +102,8 @@ If you want to override the executable used by the adapter, you can set the `CLA
 ```json
 {
   "agent_servers": {
-    "claude": {
+    "claude-acp": {
+      "type": "registry",
       "env": {
         "CLAUDE_CODE_EXECUTABLE": "/path/to/alternate-claude-code-executable"
       }
@@ -159,7 +148,10 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your
 [
   {
     "bindings": {
-      "cmd-alt-c": ["agent::NewExternalAgentThread", { "agent": "codex" }]
+      "cmd-alt-c": [
+        "agent::NewExternalAgentThread",
+        { "agent": { "custom": { "name": "codex-acp" } } }
+      ]
     }
   }
 ]
@@ -248,7 +240,7 @@ You can also add agents through your settings file ([how to edit](../configuring
 
 This can be useful if you're in the middle of developing a new agent that speaks the protocol and you want to debug it.
 
-It's also possible to specify a custom path, arguments, or environment for the builtin integrations by using the `claude` and `gemini` names.
+It's also possible to customize environment variables for registry-installed agents like Claude Agent, Codex, and Gemini CLI by using their registry names (`claude-acp`, `codex-acp`, `gemini`) with `"type": "registry"` in your settings.
 
 ## Debugging Agents
 
diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md
index 696427ae6981df5ec75907f7ce2957c07d76eb1f..24501ab2d356b8dc4098808ed8e9193cf6e171c6 100644
--- a/docs/src/ai/llm-providers.md
+++ b/docs/src/ai/llm-providers.md
@@ -1,6 +1,6 @@
 ---
 title: LLM Providers - Use Your Own API Keys in Zed
-description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, and more.
+description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, Vercel AI Gateway, and more.
 ---
 
 # LLM Providers
@@ -32,6 +32,7 @@ Zed supports these providers with your own API keys:
 - [OpenAI](#openai)
 - [OpenAI API Compatible](#openai-api-compatible)
 - [OpenRouter](#openrouter)
+- [Vercel AI Gateway](#vercel-ai-gateway)
 - [Vercel](#vercel-v0)
 - [xAI](#xai)
 
@@ -87,7 +88,7 @@ With that done, choose one of the three authentication methods:
 While it's possible to configure through the Agent Panel settings UI by entering your AWS access key and secret directly, we recommend using named profiles instead for better security practices.
 To do this:
 
-1. Create an IAM User that you can assume in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users).
+1. Create an IAM User in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users).
 2. Create security credentials for that User, save them and keep them secure.
 3. Open the Agent Configuration with (`agent: open settings`) and go to the Amazon Bedrock section
 4. Copy the credentials from Step 2 into the respective **Access Key ID**, **Secret Access Key**, and **Region** fields.
@@ -151,7 +152,7 @@ For the most up-to-date supported regions and models, refer to the [Supported Mo
 
 #### Extended Context Window {#bedrock-extended-context}
 
-Anthropic models on Bedrock support a [1M token extended context window](https://docs.anthropic.com/en/docs/build-with-claude/extended-context) beta. To enable this feature, add `"allow_extended_context": true` to your Bedrock configuration:
+Anthropic models on Bedrock support a 1M token extended context window through the `anthropic_beta` API parameter. To enable this feature, set `"allow_extended_context": true` in your Bedrock configuration:
 
 ```json [settings]
 {
@@ -166,9 +167,11 @@ Anthropic models on Bedrock support a [1M token extended context window](https:/
 }
 ```
 
-When enabled, Zed will include the `anthropic_beta` field in requests to Bedrock, enabling the 1M token context window for supported Anthropic models such as Claude Sonnet 4.5 and Claude Opus 4.6.
+Zed enables extended context for supported models (Claude Sonnet 4.5 and Claude Opus 4.6). Extended context usage may increase API costs—refer to AWS Bedrock pricing for details.
 
-> **Note**: Extended context usage may incur additional API costs. Refer to your AWS Bedrock pricing for details.
+#### Image Support {#bedrock-image-support}
+
+Bedrock models that support vision (Claude 3 and later, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision models, Mistral Pixtral) can receive images in conversations and tool results.
 
 ### Anthropic {#anthropic}
 
@@ -303,6 +306,15 @@ Here is an example of a custom Google AI model you could add to your Zed setting
   "language_models": {
     "google": {
       "available_models": [
+        {
+          "name": "gemini-3.1-pro-preview",
+          "display_name": "Gemini 3.1 Pro",
+          "max_tokens": 1000000,
+          "mode": {
+            "type": "thinking",
+            "budget_tokens": 24000
+          }
+        },
         {
           "name": "gemini-3-flash-preview",
           "display_name": "Gemini 3 Flash (Thinking)",
@@ -614,6 +626,23 @@ The OpenRouter API key will be saved in your keychain.
 
 Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined.
 
+When using OpenRouter as your assistant provider, you must explicitly select a model in your settings. OpenRouter no longer provides a default model selection.
+
+Configure your preferred OpenRouter model in `settings.json`:
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "openrouter",
+      "model": "openrouter/auto"
+    }
+  }
+}
+```
+
+The `openrouter/auto` model automatically routes your requests to the most appropriate available model. You can also specify any model available through OpenRouter's API.
+
 #### Custom Models {#openrouter-custom-models}
 
 You can add custom models to the OpenRouter provider by adding the following to your Zed settings file ([how to edit](../configuring-zed.md#settings-files)):
@@ -704,6 +733,30 @@ Example adding routing preferences to a model:
 
 These routing controls let you fine‑tune cost, capability, and reliability trade‑offs without changing the model name you select in the UI.
 
+### Vercel AI Gateway {#vercel-ai-gateway}
+
+[Vercel AI Gateway](https://vercel.com/ai-gateway) provides access to many models through a single OpenAI-compatible endpoint.
+
+1. Create an API key from your [Vercel AI Gateway keys page](https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway)
+2. Open the settings view (`agent: open settings`) and go to the **Vercel AI Gateway** section
+3. Enter your Vercel AI Gateway API key
+
+The Vercel AI Gateway API key will be saved in your keychain.
+
+Zed will also use the `VERCEL_AI_GATEWAY_API_KEY` environment variable if it's defined.
+
+You can also set a custom endpoint for Vercel AI Gateway in your settings file:
+
+```json [settings]
+{
+  "language_models": {
+    "vercel_ai_gateway": {
+      "api_url": "https://ai-gateway.vercel.sh/v1"
+    }
+  }
+}
+```
+
 ### Vercel v0 {#vercel-v0}
 
 [Vercel v0](https://v0.app/docs/api/model) is a model for generating full-stack apps, with framework-aware completions for stacks like Next.js and Vercel.
diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md
index 1a3ee231f204eb3ed0f216fe2bc71590e74c3afc..08608a9871b7f826c4789bf2213cd7fdd6d624bb 100644
--- a/docs/src/ai/mcp.md
+++ b/docs/src/ai/mcp.md
@@ -86,7 +86,7 @@ Once installation is complete, you can return to the Agent Panel and start promp
 How reliably MCP tools get called can vary from model to model.
 Mentioning the MCP server by name can help the model pick tools from that server.
 
-If you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on.
+However, if you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on.
 
 As an example, [the Dagger team suggests](https://container-use.com/agent-integrations#zed) doing that with their [Container Use MCP server](https://zed.dev/extensions/mcp-server-container-use):
 
@@ -156,3 +156,15 @@ Note that for [external agents](./external-agents.md) connected through the [Age
 
 Regarding the built-in ones, Claude Agent and Codex both support it, and Gemini CLI does not yet.
 In the meantime, learn how to add MCP server support to Gemini CLI through [their documentation](https://github.com/google-gemini/gemini-cli?tab=readme-ov-file#using-mcp-servers).
+
+### Error Handling
+
+When an MCP server encounters an error while processing a tool call, the agent receives the error message directly and the operation fails.
+Common error scenarios include:
+
+- Invalid parameters passed to the tool
+- Server-side failures (database connection issues, rate limits)
+- Unsupported operations or missing resources
+
+The error message from the context server will be shown in the agent's response, allowing you to diagnose and correct the issue.
+Check the context server's logs or documentation for details about specific error codes.
diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md
index 09c26305d277d9afc592bac9b554f123957db03b..bbf41cf66cc4d93b38123c12fadd7a60c119dfef 100644
--- a/docs/src/ai/models.md
+++ b/docs/src/ai/models.md
@@ -43,8 +43,6 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir
 |                        | OpenAI    | Cached Input        | $0.005                       | $0.0055                 |
 | Gemini 3.1 Pro         | Google    | Input               | $2.00                        | $2.20                   |
 |                        | Google    | Output              | $12.00                       | $13.20                  |
-| Gemini 3 Pro           | Google    | Input               | $2.00                        | $2.20                   |
-|                        | Google    | Output              | $12.00                       | $13.20                  |
 | Gemini 3 Flash         | Google    | Input               | $0.30                        | $0.33                   |
 |                        | Google    | Output              | $2.50                        | $2.75                   |
 | Grok 4                 | X.ai      | Input               | $3.00                        | $3.30                   |
@@ -68,7 +66,8 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret
 - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex
-- Gemini 2.5 Pro → Gemini 3 Pro
+- Gemini 2.5 Pro → Gemini 3.1 Pro
+- Gemini 3 Pro → Gemini 3.1 Pro
 - Gemini 2.5 Flash → Gemini 3 Flash
 
 ## Usage {#usage}
@@ -93,7 +92,6 @@ A context window is the maximum span of text and code an LLM can consider at onc
 | GPT-5 mini        | OpenAI    | 400k                      |
 | GPT-5 nano        | OpenAI    | 400k                      |
 | Gemini 3.1 Pro    | Google    | 200k                      |
-| Gemini 3 Pro      | Google    | 200k                      |
 | Gemini 3 Flash    | Google    | 200k                      |
 
 > Context window limits for hosted Sonnet 4.5/4.6 and Gemini 3.1 Pro/3 Pro/Flash may increase in future releases.
diff --git a/docs/src/ai/overview.md b/docs/src/ai/overview.md
index b05b3ac6a7a3c9ce42e226e75d5e9e28420f8b03..9463f7bbb11cdcb204915fca138e584baa1f9640 100644
--- a/docs/src/ai/overview.md
+++ b/docs/src/ai/overview.md
@@ -28,7 +28,7 @@ The [Inline Assistant](./inline-assistant.md) works differently: select code or
 
 [Edit Prediction](./edit-prediction.md) provides AI code completions on every keystroke. Each keypress sends a request to the prediction provider, which returns single or multi-line suggestions you accept with `tab`.
 
-The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot, Supermaven, or Codestral.
+The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot or Codestral.
 
 ## Text threads
 
diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md
index 5eac8a43268865920825557aa8f5a20ec9e04839..4aada3dff47ba8d0eca8f1056e326d6060451306 100644
--- a/docs/src/ai/privacy-and-security.md
+++ b/docs/src/ai/privacy-and-security.md
@@ -7,15 +7,17 @@ description: Zed's approach to AI privacy: opt-in data sharing by default, zero-
 
 ## Philosophy
 
-Zed aims to collect only the minimum data necessary to serve and improve our product.
+Zed collects only the minimal data necessary to serve and improve our product. Features that could share data, like AI and telemetry, are either opt-in or can be disabled.
 
-Data sharing is opt-in by default. Privacy is not a setting to toggle—it's the baseline.
+- **Telemetry**: Zed collects only the data necessary to understand usage and fix issues. Client-side telemetry can be disabled in settings.
 
-As an open-source product, we believe in maximal transparency, and invite you to examine our codebase. If you find issues, we encourage you to share them with us.
+- **AI**: Data sharing for AI improvement is opt-in, and each share is a one-time action; it does not grant permission for future data collection. You can use Zed's AI features without sharing any data with Zed and without authenticating.
 
-Zed, including AI features, works without sharing data with us and without authentication.
+- **Open-Source**: Zed's codebase is public. You can inspect exactly what data is collected and how it's handled. If you find issues, we encourage you to report them.
 
-## Documentation
+- **Secure-by-default**: Designing Zed and our Service with "secure-by-default" as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best practices in order to uphold that principle.
+
+## Related Documentation
 
 - [Tool Permissions](./tool-permissions.md): Configure granular rules to control which agent actions are auto-approved, blocked, or require confirmation.
 
@@ -23,16 +25,15 @@ Zed, including AI features, works without sharing data with us and without authe
 
 - [Telemetry](../telemetry.md): How Zed collects general telemetry data.
 
-- [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions.
+- [Zed AI Features and Privacy](./ai-improvement.md): An overview of Zed's AI features, your data when using AI in Zed, and how to opt-in and help Zed improve these features.
 
 - [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you.
 
-- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works, and how data flows to provide the experience (we don't store your code).
+- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works and how data flows. Zed does not store your code.
 
 ## Legal Links
 
-- [Terms of Service](https://zed.dev/terms-of-service)
-- [Terms of Use](https://zed.dev/terms)
+- [Terms of Service](https://zed.dev/terms)
 - [Privacy Policy](https://zed.dev/privacy-policy)
 - [Zed's Contributor License and Feedback Agreement](https://zed.dev/cla)
 - [Subprocessors](https://zed.dev/subprocessors)
diff --git a/docs/src/ai/tools.md b/docs/src/ai/tools.md
index 66f0af571d70fb8db7add2bd89139bf788369de6..faafc76b164f7f786c91c212bf51960f24a6bb0a 100644
--- a/docs/src/ai/tools.md
+++ b/docs/src/ai/tools.md
@@ -91,6 +91,6 @@ Executes shell commands and returns the combined output, creating a new shell pr
 
 ## Other Tools
 
-### `subagent`
+### `spawn_agent`
 
-Spawns a subagent with its own context window to perform a delegated task. Useful for running parallel investigations, completing self-contained tasks, or performing research where only the outcome matters. Each subagent has access to the same tools as the parent agent.
+Spawns a subagent with its own context window to perform a delegated task. Each subagent has access to the same tools as the parent agent.
diff --git a/docs/src/collaboration/overview.md b/docs/src/collaboration/overview.md
index ce74a74ddc6c831433ce64dd67701fd221cf1eb3..1022ec683bf5eefab55b9aff939c568098fdda30 100644
--- a/docs/src/collaboration/overview.md
+++ b/docs/src/collaboration/overview.md
@@ -19,3 +19,30 @@ The Collaboration Panel has two sections:
 > **Warning:** Sharing a project gives collaborators access to your local file system within that project. Only collaborate with people you trust.
 
 See the [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for more details.
+
+## Audio Settings {#audio-settings}
+
+### Selecting Audio Devices
+
+You can select specific input and output audio devices instead of using system defaults. To configure audio devices:
+
+1. Open {#kb zed::OpenSettings}
+2. Navigate to **Collaboration** > **Experimental**
+3. Use the **Output Audio Device** and **Input Audio Device** dropdowns to select your preferred devices
+
+Changes take effect immediately. If you select a device that becomes unavailable, Zed falls back to system defaults.
+
+To test your audio configuration, click **Test Audio** in the same section. This opens a window where you can verify your microphone and speaker work correctly with the selected devices.
+
+**JSON configuration:**
+
+```json [settings]
+{
+  "audio": {
+    "experimental.output_audio_device": "Device Name (device-id)",
+    "experimental.input_audio_device": "Device Name (device-id)"
+  }
+}
+```
+
+Set either value to `null` to use system defaults.
diff --git a/docs/src/completions.md b/docs/src/completions.md
index 9962fd5f24c604bb22f73ba5a797de936f9cb0d4..81c2efa3514a4623408b2869325ab0991ce382d6 100644
--- a/docs/src/completions.md
+++ b/docs/src/completions.md
@@ -8,7 +8,7 @@ description: Zed's code completions from language servers and edit predictions.
 Zed supports two sources for completions:
 
 1. "Code Completions" provided by Language Servers (LSPs) automatically installed by Zed or via [Zed Language Extensions](languages.md).
-2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot) or [Supermaven](#supermaven).
+2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot).
 
 ## Language Server Code Completions {#code-completions}
 
diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md
index 90fec03c0b96a05d9ab193da240d045314404204..485d843fd480177376cf4e5e990fc495e2bb60a7 100644
--- a/docs/src/configuring-languages.md
+++ b/docs/src/configuring-languages.md
@@ -122,11 +122,40 @@ You can specify your preference using the `language_servers` setting:
 
 In this example:
 
-- `intelephense` is set as the primary language server
-- `phpactor` is disabled (note the `!` prefix)
-- `...` expands to the rest of the language servers that are registered for PHP
+- `intelephense` is set as the primary language server.
+- `phpactor` and `phptools` are disabled (note the `!` prefix).
+- `"..."` expands to the rest of the language servers registered for PHP that are not already listed.
 
-This configuration allows you to tailor the language server setup to your specific needs, ensuring that you get the most suitable functionality for your development workflow.
+The `"..."` entry acts as a wildcard that includes any registered language server you haven't explicitly mentioned. Servers you list by name keep their position, and `"..."` fills in the remaining ones at that point in the list. Servers prefixed with `!` are excluded entirely. This means that if a new language server extension is installed or a new server is registered for a language, `"..."` will automatically include it. If you want full control over which servers are enabled, omit `"..."` — only the servers you list by name will be used.
+
+#### Examples
+
+Suppose you're working with Ruby. The default configuration is:
+
+```json [settings]
+{
+  "language_servers": [
+    "solargraph",
+    "!ruby-lsp",
+    "!rubocop",
+    "!sorbet",
+    "!steep",
+    "!kanayago",
+    "..."
+  ]
+}
+```
+
+When you override `language_servers` in your settings, your list **replaces** the default entirely. This means default-disabled servers like `kanayago` will be re-enabled by `"..."` unless you explicitly disable them again.
+
+| Configuration                                     | Result                                                             |
+| ------------------------------------------------- | ------------------------------------------------------------------ |
+| `["..."]`                                         | `solargraph`, `ruby-lsp`, `rubocop`, `sorbet`, `steep`, `kanayago` |
+| `["ruby-lsp", "..."]`                             | `ruby-lsp`, `solargraph`, `rubocop`, `sorbet`, `steep`, `kanayago` |
+| `["ruby-lsp", "!solargraph", "!kanayago", "..."]` | `ruby-lsp`, `rubocop`, `sorbet`, `steep`                           |
+| `["ruby-lsp", "solargraph"]`                      | `ruby-lsp`, `solargraph`                                           |
+
+> Note: In the first example, `"..."` includes `kanayago` even though it is disabled by default. The override replaced the default list, so the `"!kanayago"` entry is no longer present. To keep it disabled, you must include `"!kanayago"` in your configuration.
 
 ### Toolchains
 
@@ -136,6 +165,8 @@ Not all languages in Zed support toolchain discovery and selection, but for thos
 
 ### Configuring Language Servers
 
+When configuring language servers in your `settings.json`, autocomplete suggestions include all available LSP adapters recognized by Zed, not only those currently active for loaded languages. This helps you discover and configure language servers before opening files that use them.
+
 Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`:
 
 ```json [settings]
diff --git a/docs/src/debugger.md b/docs/src/debugger.md
index 2a84821cac88097e61e744f41d74abefd21d3b8b..bf05de0f6ccccff4e95fd622bab7130d655a1167 100644
--- a/docs/src/debugger.md
+++ b/docs/src/debugger.md
@@ -163,6 +163,14 @@ Some debug adapters (e.g. CodeLLDB and JavaScript) will also _verify_ whether yo
 All breakpoints enabled for a given project are also listed in "Breakpoints" item in your debugging session UI. From "Breakpoints" item in your UI you can also manage exception breakpoints.
 The debug adapter will then stop whenever an exception of a given kind occurs. Which exception types are supported depends on the debug adapter.
 
+## Working with Split Panes
+
+When debugging with multiple split panes open, Zed shows the active debug line in one pane and preserves your layout in others. If you have the same file open in multiple panes, the debugger picks a pane where the file is already the active tab—it won't switch tabs in panes where the file is inactive.
+
+Once the debugger picks a pane, it continues using that pane for subsequent breakpoints during the session. If you drag the tab with the active debug line to a different split, the debugger tracks the move and uses the new pane.
+
+This ensures the debugger doesn't disrupt your workflow when stepping through code across different files.
+
 ## Settings
 
 The settings for the debugger are grouped under the `debugger` key in `settings.json`:
diff --git a/docs/src/development.md b/docs/src/development.md
index 529ce2a69c08f2d3ebb77a5747762de642a1f841..b4c9ea387da020be8d2d0dd517b0c5998bde41e2 100644
--- a/docs/src/development.md
+++ b/docs/src/development.md
@@ -86,6 +86,30 @@ For benchmarking unit tests, annotate them with the `#[perf]` attribute from the
 perf-test -p $CRATE` to benchmark them. See the rustdoc documentation on `crates/util_macros` and `tooling/perf` for
 in-depth examples and explanations.
 
+## ETW Profiling on Windows
+
+Zed supports performance profiling with Event Tracing for Windows (ETW) to capture detailed performance data, including CPU, GPU, memory, disk, and file I/O activity. Data is saved to an `.etl` file, which can be opened in standard profiling tools for analysis.
+
+ETW recordings may contain personally identifiable or security-sensitive information, such as paths to files and registry keys accessed, as well as process names. Please keep this in mind when sharing traces with others.
+
+### Recording a trace
+
+Open the command palette and run one of the following:
+
+- `zed: record etw trace`: records CPU, GPU, memory, and I/O activity
+- `zed: record etw trace with heap tracing`: includes heap allocation data for the Zed process
+
+Zed will prompt you to choose a save location for the `.etl` file, then request administrator permission. Once granted, recording will begin.
+
+### Saving or canceling
+
+While a trace is recording, open the command palette and run one of the following:
+
+- `zed: save etw trace`: stops recording and saves the trace to disk
+- `zed: cancel etw trace`: stops recording without saving
+
+Recordings automatically save after 60 seconds if not stopped manually.
+
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)
diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md
index 3e748e4b33e51e2dcd08175b793d97ea0ddda2d8..eee29cc57d1ce5e1a5a7608c70ece98bf4a233ee 100644
--- a/docs/src/extensions/languages.md
+++ b/docs/src/extensions/languages.md
@@ -434,6 +434,40 @@ The `semantic_tokens` setting accepts the following values:
 - `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting.
 - `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
 
+#### Extension-Provided Semantic Token Rules
+
+Language extensions can ship default semantic token rules for their language server's custom token types. To do this, place a `semantic_token_rules.json` file in the language directory alongside `config.toml`:
+
+```
+my-extension/
+  languages/
+    my-language/
+      config.toml
+      highlights.scm
+      semantic_token_rules.json
+```
+
+The file uses the same format as the `semantic_token_rules` array in user settings — a JSON array of rule objects:
+
+```json
+[
+  {
+    "token_type": "lifetime",
+    "style": ["lifetime"]
+  },
+  {
+    "token_type": "builtinType",
+    "style": ["type"]
+  },
+  {
+    "token_type": "selfKeyword",
+    "style": ["variable.special"]
+  }
+]
+```
+
+This is useful when a language server reports custom (non-standard) semantic token types that aren't covered by Zed's built-in default rules. Extension-provided rules act as sensible defaults for that language — users can always override them via `semantic_token_rules` in their settings file, and built-in default rules are only used when neither user nor extension rules match.
+
 #### Customizing Semantic Token Styles
 
 Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme.
@@ -463,7 +497,13 @@ Zed supports customizing the styles used for semantic tokens. You can define rul
 }
 ```
 
-All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules.
+All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted.
+
+Rules are applied in the following priority order (highest to lowest):
+
+1. **User settings** — rules from `semantic_token_rules` in your settings file.
+2. **Extension rules** — rules from `semantic_token_rules.json` in extension language directories.
+3. **Default rules** — Zed's built-in rules for standard LSP token types.
 
 Each rule in the `semantic_token_rules` array is defined as follows:
 
diff --git a/docs/src/finding-navigating.md b/docs/src/finding-navigating.md
index b5f6e3fff774281d699276449c11602df543a021..f1d3536f8c909f18240f83eac6f4309159b764e1 100644
--- a/docs/src/finding-navigating.md
+++ b/docs/src/finding-navigating.md
@@ -19,8 +19,6 @@ Open any file in your project with {#kb file_finder::Toggle}. Type part of the f
 
 ## Project Search
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Search across all files with {#kb pane::DeploySearch}. Start typing in the search field to begin searching—results appear as you type.
 
 Results appear in a [multibuffer](./multibuffers.md), letting you edit matches in place.
diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md
index be5281b56091740d274b70b1ee11c348418cc9da..a87e1bea0f4c3eacaa330b34874283a0b61b5eb9 100644
--- a/docs/src/getting-started.md
+++ b/docs/src/getting-started.md
@@ -11,6 +11,12 @@ This guide covers the essential commands, environment setup, and navigation basi
 
 ## Quick Start
 
+### Welcome Page
+
+When you open Zed without a folder, you see the welcome page in the main editor area. The welcome page offers quick actions to open a folder, clone a repository, or view documentation. Once you open a folder or file, the welcome page disappears. If you split the editor into multiple panes, the welcome page appears only in the center pane when empty—other panes show a standard empty state.
+
+To reopen the welcome page, close all items in the center pane or use the command palette to search for "Welcome".
+
 ### 1. Open a Project
 
 Open a folder from the command line:
diff --git a/docs/src/git.md b/docs/src/git.md
index 5e46dfc322a21dca186dd08389fbf6f72a777288..f7b524925195a80af05387ad1b063ceccff66436 100644
--- a/docs/src/git.md
+++ b/docs/src/git.md
@@ -72,8 +72,6 @@ To disable word diff for specific languages only, add this to your settings.json
 
 ### Diff View Styles
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Zed displays diffs in two modes: **split** (side-by-side comparison) or **unified** (inline changes). Split view is the default.
 
 #### Changing the diff view
@@ -101,11 +99,11 @@ You can switch between modes at any time. Your preference applies to [Project Di
 
 File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit.
 
-To open File History:
+To view File History:
 
-- Right-click on a file in the Project Panel and select "Open File History"
-- Right-click on a file in the Git Panel and select "Open File History"
-- Right-click on an editor tab and select "Open File History"
+- Right-click on a file in the Project Panel and select "View File History"
+- Right-click on a file in the Git Panel and select "View File History"
+- Right-click on an editor tab and select "View File History"
 - Use the Command Palette and search for "file history"
 
 ## Fetch, Push, and Pull
diff --git a/docs/src/globs.md b/docs/src/globs.md
index 26ecf51da8c0420fb65428eb296887b603a99eb5..f1fb584ee568d2e7393539ec3d74b5020c483aaf 100644
--- a/docs/src/globs.md
+++ b/docs/src/globs.md
@@ -24,8 +24,6 @@ A glob "pattern" is used to match a file name or complete file path. For example
 
 ### Multiple Patterns
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 You can specify multiple glob patterns in Project Search filters by separating them with commas. When using comma-separated patterns, Zed correctly handles braces within individual patterns:
 
 - `*.ts, *.tsx` — Match TypeScript and TSX files
diff --git a/docs/src/languages/ansible.md b/docs/src/languages/ansible.md
index 99980a1a1642717d8306cf8d98ce81be33326207..fd595bc7e3391ab95d90c3d4e34742e6a8bd7c1f 100644
--- a/docs/src/languages/ansible.md
+++ b/docs/src/languages/ansible.md
@@ -14,10 +14,13 @@ Support for Ansible in Zed is provided via a community-maintained [Ansible exten
 
 ### File detection
 
-To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. To change this behavior you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example:
+To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default.
+
+To change this behavior, you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example:
 
 ```json [settings]
-"file_types": {
+{
+  "file_types": {
     "Ansible": [
       "**.ansible.yml",
       "**.ansible.yaml",
@@ -39,6 +42,7 @@ To avoid mishandling non-Ansible YAML files, the Ansible Language is not associa
       "**playbook*.yaml"
     ]
   }
+}
 ```
 
 Feel free to modify this list as per your needs.
@@ -47,34 +51,36 @@ Feel free to modify this list as per your needs.
 
 If your inventory file is in the YAML format, you can either:
 
-- Append the `ansible-lint` inventory json schema to it via the following comment at the top of your inventory file:
+- Append the `ansible-lint` inventory JSON schema to it via the following comment at the top of your inventory file:
 
 ```yml
 # yaml-language-server: $schema=https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json
 ```
 
-- Or configure the yaml language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)):
+- or, configure the YAML language server settings to set this schema for all your inventory files that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)):
 
 ```json [settings]
-"lsp": {
+{
+  "lsp": {
     "yaml-language-server": {
       "settings": {
         "yaml": {
           "schemas": {
             "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json": [
               "./inventory/*.yaml",
-              "hosts.yml",
+              "hosts.yml"
             ]
           }
         }
       }
     }
-},
+  }
+}
 ```
 
 ### LSP Configuration
 
-By default, the following default config is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server:
+By default, the following configuration is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server:
 
 ```json
 {
@@ -99,31 +105,32 @@ By default, the following default config is passed to the Ansible language serve
 }
 ```
 
-> [!NOTE]
-> In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your PATH
+> **Note:** In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your `$PATH`.
 
 When desired, any of the above default settings can be overridden under the `"lsp"` section of your Zed settings file. For example:
 
 ```json [settings]
-"lsp": {
-  // Note, the Zed Ansible extension prefixes all settings with `ansible`
-  // so instead of using `ansible.ansible.path` use `ansible.path`.
-  "ansible-language-server": {
-    "settings": {
-      "ansible": {
-        "path": "ansible"
-      },
-      "executionEnvironment": {
-        "enabled": false
-      },
-      "python": {
-        "interpreterPath": "python3"
-      },
-      "validation": {
-        "enabled": false, // disable validation
-        "lint": {
-          "enabled": false, // disable ansible-lint
-          "path": "ansible-lint"
+{
+  "lsp": {
+    // The Zed Ansible extension prefixes all settings with `ansible`
+    // so use `ansible.path` instead of `ansible.ansible.path`.
+    "ansible-language-server": {
+      "settings": {
+        "ansible": {
+          "path": "ansible"
+        },
+        "executionEnvironment": {
+          "enabled": false
+        },
+        "python": {
+          "interpreterPath": "python3"
+        },
+        "validation": {
+          "enabled": false,
+          "lint": {
+            "enabled": false,
+            "path": "ansible-lint"
+          }
         }
       }
     }
@@ -131,5 +138,4 @@ When desired, any of the above default settings can be overridden under the `"ls
 }
 ```
 
-A full list of options/settings, that can be passed to the server, can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/5a89836d66d470fb9d20e7ea8aa2af96f12f61fb/docs/als/settings.md).
-Feel free to modify option values as needed.
+A full list of options/settings that can be passed to the server can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/main/docs/als/settings.md).
diff --git a/docs/src/languages/bash.md b/docs/src/languages/bash.md
index ce398a7237dd3131ab0b5e0bac10ded14d62f218..c801b55054c9939f5e124aca76dc5e6b80f008d4 100644
--- a/docs/src/languages/bash.md
+++ b/docs/src/languages/bash.md
@@ -5,8 +5,7 @@ description: "Configure Bash language support in Zed, including language servers
 
 # Bash
 
-Bash language support in Zed is provided by the community-maintained [Basher extension](https://github.com/d1y/bash.zed).
-Report issues to: [https://github.com/d1y/bash.zed/issues](https://github.com/d1y/bash.zed/issues)
+Bash support is available through the [Bash extension](https://github.com/zed-extensions/bash).
 
 - Tree-sitter: [tree-sitter/tree-sitter-bash](https://github.com/tree-sitter/tree-sitter-bash)
 - Language Server: [bash-lsp/bash-language-server](https://github.com/bash-lsp/bash-language-server)
diff --git a/docs/src/languages/json.md b/docs/src/languages/json.md
index 253669b2fca3a5007e8ea748a8602d85575b24ce..41644a8b0556c3a21f1c680a2fccb8c901a580cc 100644
--- a/docs/src/languages/json.md
+++ b/docs/src/languages/json.md
@@ -54,8 +54,6 @@ For example to for a `.luarc.json` for use with [lua-language-server](https://gi
 
 ### Schema Specification via Settings
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 You can associate JSON Schemas with file paths using relative paths in your language server settings. Zed resolves paths relative to your project root:
 
 ```json [settings]
diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md
index d66f52c71cb9295fe9ca94e5890de48cd1275e57..fdeabec5069ed20a9b168ab19129dde0cc6280ba 100644
--- a/docs/src/languages/python.md
+++ b/docs/src/languages/python.md
@@ -89,8 +89,8 @@ Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages
   "languages": {
     "Python": {
       "language_servers": [
-        // Disable basedpyright and enable ty, and otherwise
-        // use the default configuration.
+        // Disable basedpyright and enable ty, and include all
+        // other registered language servers (ruff, pylsp, pyright).
         "ty",
         "!basedpyright",
         "..."
diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md
index 907a07cc4366a29e7481aa5d927de8887ec84e96..b82e14d64bd20f861d505b71f88b73fc4dfdf56f 100644
--- a/docs/src/languages/yaml.md
+++ b/docs/src/languages/yaml.md
@@ -12,8 +12,6 @@ YAML support is available natively in Zed.
 
 ## Configuration
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 You can configure various [yaml-language-server settings](https://github.com/redhat-developer/yaml-language-server?tab=readme-ov-file#language-server-settings) by adding them to your Zed settings.json in a `yaml-language-server` block under the `lsp` key.
 
 You can configure custom YAML schemas using relative paths. Zed resolves paths relative to your project root:
diff --git a/docs/src/migrate/vs-code.md b/docs/src/migrate/vs-code.md
index 367cab469acb1969909457edecba8a10c633bfc4..820158c73ffc1ec2f869ad88e34fea4697e4fbec 100644
--- a/docs/src/migrate/vs-code.md
+++ b/docs/src/migrate/vs-code.md
@@ -317,18 +317,12 @@ If you’re used to GitHub Copilot in VS Code, you can do the same in Zed. You c
 
 #### Configuring GitHub Copilot
 
-You should be able to sign-in to GitHub Copilot by clicking on the Zeta icon in the status bar and following the setup instructions.
-You can also add this to your settings:
+1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows)
+2. Navigate to **AI → Edit Predictions**
+3. Click **Configure** next to "Configure Providers"
+4. Under **GitHub Copilot**, click **Sign in to GitHub**
 
-```json
-{
-  "features": {
-    "edit_prediction_provider": "copilot"
-  }
-}
-```
-
-To invoke completions, just start typing. Zed will offer suggestions inline for you to accept.
+Once signed in, just start typing. Zed will offer suggestions inline for you to accept.
 
 #### Additional AI Options
 
diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md
index e5f1f911a4e0257427a86b30c835abd2dfa7fd0f..7b31725bf2cec844881e0c5b0b41aac864e28fc9 100644
--- a/docs/src/outline-panel.md
+++ b/docs/src/outline-panel.md
@@ -7,7 +7,7 @@ description: Navigate code structure with Zed's outline panel. View symbols, jum
 
 In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar.
 
-When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
+When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal—it displays the outline of the current buffer's symbols. Each symbol entry shows its type prefix (such as "struct", "fn", "mod", "impl") along with the symbol name, helping you quickly identify what kind of symbol you're looking at. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
 
 ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png)
 
diff --git a/docs/src/performance.md b/docs/src/performance.md
index 09abecdeffe4e268413a73b189ef301511b1a20e..e974d63f8816b68d30a1c06d7cbbc083f8564327 100644
--- a/docs/src/performance.md
+++ b/docs/src/performance.md
@@ -78,7 +78,7 @@ Download the importer
 - `cd import && mkdir build && cd build`
 - Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..`
 - Build the importer: `ninja`
-- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof /path/to/output.tracy`
+- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof.json /path/to/output.tracy`
 - Open the trace in tracy:
   - If you're on windows download the v0.12.2 version from the releases on the upstream repo
   - If you're on other platforms open it on the website: https://tracy.nereid.pl/ (the version might mismatch so your luck might vary, we need to host our own ideally..)
@@ -87,7 +87,7 @@ Download the importer
 
 - Run the action: `zed open performance profiler`
 - Hit the save button. This opens a save dialog or if that fails to open the trace gets saved in your working directory.
-- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler  output.tracy`
+- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler trace.miniprof.json output.tracy`
 - Go to  hit the 'power button' in the top left and then open saved trace.
 - Now zoom in to see the tasks and how long they took
 
diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md
index 0211114aee3ce95d63621a2702677290bd2c575b..32fec4a84d56cf996dc85cf112e4daec7893311b 100644
--- a/docs/src/reference/all-settings.md
+++ b/docs/src/reference/all-settings.md
@@ -519,8 +519,6 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed
 - `"unified"`: Show changes inline with added and deleted lines stacked vertically
 - `"split"`: Display old and new versions side by side in separate panes (default)
 
-> **Changed in Preview (v0.225).** Values renamed from `"stacked"`/`"side_by_side"` to `"unified"`/`"split"`.
-
 See [Git documentation](../git.md#diff-view-styles) for more details.
 
 ## Disable AI
@@ -1802,17 +1800,7 @@ While other options may be changed at a runtime and should be placed under `sett
 }
 ```
 
-3. Use Supermaven as the edit prediction provider:
-
-```json [settings]
-{
-  "edit_predictions": {
-    "provider": "supermaven"
-  }
-}
-```
-
-4. Turn off edit predictions across all providers
+3. Turn off edit predictions across all providers
 
 ```json [settings]
 {
@@ -2774,8 +2762,6 @@ These values take in the same options as the root-level settings with the same n
 
 ### Document Symbols
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 - Description: Controls the source of document symbols used for outlines and breadcrumbs.
 - Setting: `document_symbols`
 - Default: `off`
@@ -5140,8 +5126,6 @@ See the [debugger page](../debugger.md) for more information about debugging sup
 
 ## Git Worktree Directory
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 - Description: Directory where git worktrees are created, relative to the repository working directory.
 - Setting: `git.worktree_directory`
 - Default: `"../worktrees"`
diff --git a/docs/src/reference/cli.md b/docs/src/reference/cli.md
index b7c4fafd0b8b30fc64a59a1871a7698b0572fca3..788e287c3abe1f1fe752e00b938de477bcf9d78b 100644
--- a/docs/src/reference/cli.md
+++ b/docs/src/reference/cli.md
@@ -71,8 +71,6 @@ zed -n ~/projects/myproject
 
 ### `-a`, `--add`
 
-> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
-
 Add paths to the currently focused workspace instead of opening a new window. When multiple workspace windows are open, files open in the focused window:
 
 ```sh
diff --git a/docs/src/repl.md b/docs/src/repl.md
index f1cc0ef08ae384c280a3eaaf3d2de0bcfd5c7395..2e782cb0c14e17cd0ce35dec264d4173a46d404f 100644
--- a/docs/src/repl.md
+++ b/docs/src/repl.md
@@ -151,8 +151,6 @@ TBD: Improve Julia REPL instructions
 
 ## Changing which kernel is used per language {#changing-kernels}
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 Zed automatically detects available kernels and organizes them in the kernel picker:
 
 - **Recommended**: The Python environment matching your active toolchain (if detected)
@@ -193,8 +191,6 @@ To configure a different default kernel for a language, you can assign a kernel
 
 ## Interactive Input
 
-> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
-
 When code execution requires user input (such as Python's `input()` function), the REPL displays an input prompt below the cell output.
 
 Type your response in the text field and press `Enter` to submit. The kernel receives your input and continues execution.
diff --git a/docs/src/semantic-tokens.md b/docs/src/semantic-tokens.md
index ab30525c504455fc7f1fa431b212b975c1d75061..d26666ca7e7e60614bd4f1f9f06e771168611de2 100644
--- a/docs/src/semantic-tokens.md
+++ b/docs/src/semantic-tokens.md
@@ -48,7 +48,7 @@ You can configure this globally or per-language:
 Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your settings.json: add rules under `global_lsp_settings.semantic_token_rules` key.
 
 Rules are matched in order, and the first matching rule wins.
-User-defined rules take precedence over defaults.
+User-defined rules take highest precedence, followed by extension-provided language rules, then Zed defaults.
 
 ### Rule Structure
 
@@ -139,7 +139,7 @@ To disable highlighting for a specific token type, add an empty rule that matche
 }
 ```
 
-Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
+Since user rules take highest precedence and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
 
 ## Default Rules
 
diff --git a/docs/src/snippets.md b/docs/src/snippets.md
index 72cbec7b20ff694304a58a70cd9b142a60fc58a2..9f6b6c880be9edcace23f0e3fd0a02263549776a 100644
--- a/docs/src/snippets.md
+++ b/docs/src/snippets.md
@@ -42,24 +42,4 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead
 ## Known Limitations
 
 - Only the first prefix is used when a list of prefixes is passed in.
-- Currently only the `json` snippet file format is supported, even though the `simple-completion-language-server` supports both `json` and `toml` file formats.
-
-## See also
-
-The `feature_paths` option in `simple-completion-language-server` is disabled by default.
-
-If you want to enable it you can add the following to your `settings.json`:
-
-```json [settings]
-{
-  "lsp": {
-    "snippet-completion-server": {
-      "settings": {
-        "feature_paths": true
-      }
-    }
-  }
-}
-```
-
-For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main).
+- Currently only the `json` snippet file format is supported.
diff --git a/docs/src/tasks.md b/docs/src/tasks.md
index 9e0e38690096d674fc84d968c90091761ae43082..482ca7b4d5779a4861756332ce2c0f25eaad4ad4 100644
--- a/docs/src/tasks.md
+++ b/docs/src/tasks.md
@@ -223,6 +223,35 @@ This could be useful for launching a terminal application that you want to use i
 }
 ```
 
+## VS Code Task Format
+
+When importing VS Code tasks from `.vscode/tasks.json`, you can omit the `label` field. Zed automatically generates labels based on the task type:
+
+- **npm tasks**: `npm: 
         {{/if}}
+        
+        
     
     
     
@@ -307,6 +309,50 @@ {{/next}}
+
@@ -407,23 +453,82 @@ {{/if}} {{/if}} - - + +
diff --git a/extensions/glsl/languages/glsl/brackets.scm b/extensions/glsl/languages/glsl/brackets.scm index 62e137ef2629f3b7f7aeafbad419a36d19361d19..e83d67f411a71f2602dc774531d904a949c45b9a 100644 --- a/extensions/glsl/languages/glsl/brackets.scm +++ b/extensions/glsl/languages/glsl/brackets.scm @@ -1,3 +1,8 @@ -("[" @open "]" @close) -("{" @open "}" @close) -("(" @open ")" @close) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("(" @open + ")" @close) diff --git a/extensions/glsl/languages/glsl/highlights.scm b/extensions/glsl/languages/glsl/highlights.scm index 09f94d4fb587963254c9bc31ec25b66a0e1e4323..9e40610ff5494102f8524b287ad2e50ec48d78db 100644 --- a/extensions/glsl/languages/glsl/highlights.scm +++ b/extensions/glsl/languages/glsl/highlights.scm @@ -1,82 +1,136 @@ "break" @keyword + "case" @keyword + "const" @keyword + "continue" @keyword + "default" @keyword + "do" @keyword + "else" @keyword + "enum" @keyword + "extern" @keyword + "for" @keyword + "if" @keyword + "inline" @keyword + "return" @keyword + "sizeof" @keyword + "static" @keyword + "struct" @keyword + "switch" @keyword + "typedef" @keyword + "union" @keyword + "volatile" @keyword + "while" @keyword "#define" @keyword + "#elif" @keyword + "#else" @keyword + "#endif" @keyword + "#if" @keyword + "#ifdef" @keyword + "#ifndef" @keyword + "#include" @keyword + (preproc_directive) @keyword "--" @operator + "-" @operator + "-=" @operator + "->" @operator + "=" @operator + "!=" @operator + "*" @operator + "&" @operator + "&&" @operator + "+" @operator + "++" @operator + "+=" @operator + "<" @operator + "==" @operator + ">" @operator + "||" @operator "." 
@delimiter + ";" @delimiter (string_literal) @string + (system_lib_string) @string (null) @constant + (number_literal) @number + (char_literal) @number (identifier) @variable (field_identifier) @property + (statement_identifier) @label + (type_identifier) @type + (primitive_type) @type + (sized_type_specifier) @type (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]*$")) + (#match? @constant "^[A-Z][A-Z\\d_]*$")) (comment) @comment @@ -111,7 +165,5 @@ (extension_storage_class) @storageclass -( - (identifier) @variable.builtin - (#match? @variable.builtin "^gl_") -) +((identifier) @variable.builtin + (#match? @variable.builtin "^gl_")) diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index bb3b43e813929de705605e3ecc3e0b1052c48297..21bf193cf346313024ba8df6e7457c785e21476e 100644 --- a/extensions/html/languages/html/highlights.scm +++ b/extensions/html/languages/html/highlights.scm @@ -1,12 +1,17 @@ (tag_name) @tag + (doctype) @tag.doctype + (attribute_name) @attribute + [ "\"" "'" (attribute_value) ] @string + (comment) @comment + (entity) @string.special "=" @punctuation.delimiter.html diff --git a/extensions/html/languages/html/indents.scm b/extensions/html/languages/html/indents.scm index 436663dba3e1993c84e151f09c581844fdcb977a..6e5bf97d4c3edeb251cdcffdaf6c9f9659d39849 100644 --- a/extensions/html/languages/html/indents.scm +++ b/extensions/html/languages/html/indents.scm @@ -1,5 +1,8 @@ -(start_tag ">" @end) @indent -(self_closing_tag "/>" @end) @indent +(start_tag + ">" @end) @indent + +(self_closing_tag + "/>" @end) @indent (element (start_tag) @start diff --git a/extensions/html/languages/html/injections.scm 
b/extensions/html/languages/html/injections.scm index 525b3efe29dca541afc8829dd41ff217f48439c3..e9c2c98155768fdee9a4fcefe672bebf7d4ce8f4 100644 --- a/extensions/html/languages/html/injections.scm +++ b/extensions/html/languages/html/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (script_element (raw_text) @injection.content @@ -11,11 +10,15 @@ (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? @_attribute_name "^style$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "css")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^style$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? @_attribute_name "^on[a-z]+$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "javascript")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^on[a-z]+$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! 
injection.language "javascript")) diff --git a/extensions/html/languages/html/overrides.scm b/extensions/html/languages/html/overrides.scm index 434f610e70242be8589a9f58cc7fd4704d5d9296..3e9e499e5c95b960e7ec9fe4e46bb078b8043092 100644 --- a/extensions/html/languages/html/overrides.scm +++ b/extensions/html/languages/html/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment + (quoted_attribute_value) @string [ diff --git a/extensions/proto/languages/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm index 923e00bb1dfca30afcf41a6ab681846d8f20b900..f17c48127380a4c314f4d5b8498b16d4c9d85be6 100644 --- a/extensions/proto/languages/proto/highlights.scm +++ b/extensions/proto/languages/proto/highlights.scm @@ -52,11 +52,11 @@ "}" "<" ">" -] @punctuation.bracket +] @punctuation.bracket [ - ";" - "," + ";" + "," ] @punctuation.delimiter "=" @operator diff --git a/extensions/proto/languages/proto/indents.scm b/extensions/proto/languages/proto/indents.scm index acb44a5e1e617cc0d735228af022129c0b39d561..c096b82d2b2d6856bcb6c39bf44212507b605e38 100644 --- a/extensions/proto/languages/proto/indents.scm +++ b/extensions/proto/languages/proto/indents.scm @@ -1,3 +1,11 @@ -(_ "{" "}" @end) @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "[" + "]" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/proto/languages/proto/outline.scm b/extensions/proto/languages/proto/outline.scm index f90b1bae33effade920bf8f2c76d7f2d187f1d8e..f0aa6eff9801cdbfe8f61395901cfb96806e10a7 100644 --- a/extensions/proto/languages/proto/outline.scm +++ b/extensions/proto/languages/proto/outline.scm @@ -1,19 +1,19 @@ (message - "message" @context - (message_name - (identifier) @name)) @item + "message" @context + (message_name + (identifier) @name)) @item (service - "service" @context - (service_name - (identifier) @name)) @item + "service" @context + (service_name + (identifier) @name)) @item (rpc - "rpc" @context - (rpc_name - 
(identifier) @name)) @item + "rpc" @context + (rpc_name + (identifier) @name)) @item (enum - "enum" @context - (enum_name - (identifier) @name)) @item + "enum" @context + (enum_name + (identifier) @name)) @item diff --git a/extensions/proto/languages/proto/textobjects.scm b/extensions/proto/languages/proto/textobjects.scm index 90ea84282da39df8a2023108c367c3ef76a0ef9a..7e859c0d65bd5d119b616d626f3d88ee6d1fc6ee 100644 --- a/extensions/proto/languages/proto/textobjects.scm +++ b/extensions/proto/languages/proto/textobjects.scm @@ -1,17 +1,21 @@ -(message (message_body +(message + (message_body "{" (_)* @class.inside "}")) @class.around -(enum (enum_body + +(enum + (enum_body "{" (_)* @class.inside "}")) @class.around + (service - "service" - (_) - "{" - (_)* @class.inside - "}") @class.around + "service" + (_) + "{" + (_)* @class.inside + "}") @class.around (rpc) @function.around diff --git a/extensions/test-extension/languages/gleam/highlights.scm b/extensions/test-extension/languages/gleam/highlights.scm index 4b85b88d0151a1bfe9018f0c526497261d6e1801..50de3a6acbe6a8b65340d288334aa7185afc8609 100644 --- a/extensions/test-extension/languages/gleam/highlights.scm +++ b/extensions/test-extension/languages/gleam/highlights.scm @@ -1,6 +1,8 @@ ; Comments (module_comment) @comment + (statement_comment) @comment + (comment) @comment ; Constants @@ -9,43 +11,61 @@ ; Variables (identifier) @variable + (discard) @comment.unused ; Modules (module) @module -(import alias: (identifier) @module) + +(import + alias: (identifier) @module) + (remote_type_identifier module: (identifier) @module) + (remote_constructor_name module: (identifier) @module) + ((field_access record: (identifier) @module field: (label) @function) - (#is-not? local)) + (#is-not? 
local)) ; Functions -(unqualified_import (identifier) @function) -(unqualified_import "type" (type_identifier) @type) -(unqualified_import (type_identifier) @constructor) +(unqualified_import + (identifier) @function) + +(unqualified_import + "type" + (type_identifier) @type) + +(unqualified_import + (type_identifier) @constructor) + (function name: (identifier) @function) + (external_function name: (identifier) @function) + (function_parameter name: (identifier) @variable.parameter) + ((function_call - function: (identifier) @function) - (#is-not? local)) + function: (identifier) @function) + (#is-not? local)) + ((binary_expression - operator: "|>" - right: (identifier) @function) - (#is-not? local)) + operator: "|>" + right: (identifier) @function) + (#is-not? local)) ; "Properties" ; Assumed to be intended to refer to a name for a field; something that comes ; before ":" or after "." ; e.g. record field names, tuple indices, names for named arguments, etc (label) @property + (tuple_access index: (integer) @property) @@ -54,10 +74,12 @@ "@" @attribute name: (identifier) @attribute) -(attribute_value (identifier) @constant) +(attribute_value + (identifier) @constant) ; Type names (remote_type_identifier) @type + (type_identifier) @type ; Data constructors @@ -65,19 +87,24 @@ ; Literals (string) @string + ((escape_sequence) @warning - ; Deprecated in v0.33.0-rc2: - (#eq? @warning "\\e")) + ; Deprecated in v0.33.0-rc2: + (#eq? @warning "\\e")) + (escape_sequence) @string.escape + (bit_string_segment_option) @function.builtin + (integer) @number + (float) @number ; Reserved identifiers ; TODO: when tree-sitter supports `#any-of?` in the Rust bindings, ; refactor this to use `#any-of?` rather than `#match?` ((identifier) @warning - (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) + (#match? 
@warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) ; Keywords [ @@ -102,8 +129,12 @@ ; Operators (binary_expression operator: _ @operator) -(boolean_negation "!" @operator) -(integer_negation "-" @operator) + +(boolean_negation + "!" @operator) + +(integer_negation + "-" @operator) ; Punctuation [ @@ -116,10 +147,11 @@ "<<" ">>" ] @punctuation.bracket + [ "." "," - ;; Controversial -- maybe some are operators? + ; Controversial -- maybe some are operators? ":" "#" "=" diff --git a/extensions/test-extension/languages/gleam/indents.scm b/extensions/test-extension/languages/gleam/indents.scm index 112b414aa45f277138d0c681851129a608ee96e0..92f1a04d86d34d60763cceb872c5ac1004ba4601 100644 --- a/extensions/test-extension/languages/gleam/indents.scm +++ b/extensions/test-extension/languages/gleam/indents.scm @@ -1,3 +1,11 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/test-extension/languages/gleam/outline.scm b/extensions/test-extension/languages/gleam/outline.scm index 5df7a6af800e8e3c9f0b00834576f2e059bd12b0..2d1a7d800fb9c662f41a0a865e492716b876f2fd 100644 --- a/extensions/test-extension/languages/gleam/outline.scm +++ b/extensions/test-extension/languages/gleam/outline.scm @@ -1,31 +1,31 @@ (external_type - (visibility_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (type_definition - (visibility_modifier)? @context - (opacity_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + (opacity_modifier)? @context + "type" @context + (type_name) @name) @item (data_constructor - (constructor_name) @name) @item + (constructor_name) @name) @item (data_constructor_argument - (label) @name) @item + (label) @name) @item (type_alias - (visibility_modifier)? 
@context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (function - (visibility_modifier)? @context - "fn" @context - name: (_) @name) @item + (visibility_modifier)? @context + "fn" @context + name: (_) @name) @item (constant - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item diff --git a/extensions/workflows/shared/bump_version.yml b/extensions/workflows/shared/bump_version.yml index bbf7e9b11ca02d15cdee2c300d3a93caffe3f650..dbe92a43a5a3c7900f6d23fffd8ebd3eee9ca95f 100644 --- a/extensions/workflows/shared/bump_version.yml +++ b/extensions/workflows/shared/bump_version.yml @@ -52,7 +52,7 @@ jobs: app-secret: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} with: bump-type: ${{ needs.determine_bump_type.outputs.bump_type }} - force-bump: true + force-bump: ${{ github.event_name != 'push' }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}labels cancel-in-progress: true diff --git a/legal/privacy-policy.md b/legal/privacy-policy.md index eaf8ece78160e7f643b1fd646e42a71432aafbd0..1eacc5273c93f5035579a72fe5241beca7e3718f 100644 --- a/legal/privacy-policy.md +++ b/legal/privacy-policy.md @@ -3,266 +3,178 @@ title: Privacy Policy slug: privacy-policy --- -At Zed Industries, Inc. ("Zed", "Company" or "we"), we take privacy and the security of data seriously. This Privacy Policy is established to help advise you about how we treat your personal data. 
By using or accessing our website located at zed.dev, or the Solution or services available pursuant the Zed End User Terms located at [https://zed.dev/terms](https://zed.dev/terms) (collectively, the "Services"), you acknowledge awareness of the practices and policies outlined below, and hereby consent that we will collect, use, and share your personal data as described in this Privacy Policy. +**Last Updated**: March 2, 2026 -As we grow and expand our Services, we may modify this Privacy Policy from time to time. When material modifications are made, we will alert you to any such changes by placing a notice on the Company website, by sending you an email and/or by some other means. Please note that if you've opted not to receive legal notice emails from us (or haven't provided us with a valid email address), those legal notices will still govern your use of the Services. If you use the Services after any changes to the Privacy Policy have been published on our website, you consent and agree to all of the changes. +## Summary -## What this Privacy Policy Covers +Zed collects user information (name, email, username, IP address, etc.), device and usage data, and web analytics to improve our products and services, as well as payment details if you subscribe. We do not store your source code and we process but do not store audio/video/screenshare from collaboration sessions. -Our Privacy Policy covers how we treat Personal Data that we gather when you access or use our Services. "Personal Data" means information that identifies or relates to a particular individual and includes information referred to as "personally identifiable information" or "personal information" under applicable data privacy laws, rules or regulations. Our Privacy Policy does not cover the practices of companies we don't own or control or people we don't manage. 
+- **We don't sell your data and we don't allow AI providers to train on it** +- **You control Zed editor telemetry** — disable it in Settings or learn more at [https://zed.dev/docs/telemetry](/telemetry) +- **We honor Global Privacy Control** (GPC) signals from your browser +- To access, correct, or delete your data, email privacy@zed.dev with subject "Privacy Request" -## Personal Data +Questions? Contact us at privacy@zed.dev. The full policy below contains complete details. -### Categories of Personal Data We Collect +## Introduction -This chart details the categories of Personal Data that we collect and have collected over the past 12 months: +This Privacy Policy explains how Zed Industries, Inc. ("**Zed**," "**we**," "**our**," or "**us**") collects, uses, and shares your personal data when you use our websites (including [https://zed.dev](https://zed.dev)), our downloadable software ("**Software**"), our subscription service ("**Service**"), or otherwise interact with us. -| Category of personal data | Examples of data we collect | Categories of third parties with whom we share this data | -| ----------------------------- | ------------------------------------------------------------- | -------------------------------------------------------- | -| Profile or contact data | First and last name
Email address
GitHub username | Cloud infrastructure providers
Analytics providers | -| IP data | IP address & derived geolocation data | Cloud infrastructure providers
Analytics providers | -| Web analytics | Interactions
Referrer
Request IDs
Statistics | Cloud infrastructure providers
Analytics providers | -| Photos, videos and recordings | Screenshots
Videos and video recordings you share with us | Cloud infrastructure providers | -| Audio, screenshare data | Audio and screen sharing during collaboration calls | Cloud infrastructure providers | +As used in this Privacy Policy, “personal data” means any information relating to an identified or identifiable individual and includes any information that constitutes "personally identifiable information," “personal data,” or "personal information" under applicable privacy or data protection laws or regulations. -Note that "collection" does not necessarily imply long-term storage. +You acknowledge the collection, use, disclosure, procedures, and other processing described in this Privacy Policy. Beyond the Privacy Policy, your use of our products and services is also subject to our Terms of Service included alongside. This Privacy Policy does not apply to the extent we process personal data in the role of a processor or service provider on behalf of our Zed Business customers. Such processing is governed by our Data Processing Agreement, available upon request. For information about the sub-processors we engage in that capacity, see [https://zed.dev/subprocessors](/subprocessors). -### Categories of Sources of Personal Data - -We collect Personal Data about you from the following categories of sources: - -#### You - -- When you provide such information directly to us. Examples include: - - When you create an account - - When you voluntarily provide information through our Services or through responses to surveys or questionnaires. - - When you send us an email or otherwise contact us. - - When you sign up to our mailing list. -- When you use our hosted Services and such information is collected automatically. Examples include: - - Cookies (defined in the "Tracking Tools and Opt-Out" section below). -- When you use the client software we provide on your machine. Examples include: - - Authentication information when you sign in. 
- - Version and system metadata when the software checks for updates. - - Usage data, unless you opt out. - - Crash reports, unless you opt out. - - When you make requests to language models we host for you. - - Zed does not store or train on your requests without consent. - - Other relevant data necessary to provide you with our Services. - -#### Third Parties +## Personal Data We Collect -- When you login to the service using a third-party service like GitHub. -- Information collected by content delivery networks or similar service providers -- We may use analytics providers to analyze how you interact and engage with the Services, or third parties may help us provide you with customer support. +We may collect personal data from or about you and your devices from the sources and in the manner described below. If you do not provide requested data, some features may not work - either because we need that data to deliver them, or because we're legally required to collect it. -## Our Business Purposes for Collecting or Disclosing Personal Data +For authorized users on Zed Business plans, certain data described in this section may be processed by Zed as a data processor on behalf of the Zed Business customer. In those cases, the Zed Business customer determines the purposes and lawful basis for that processing, as set forth in our Data Processing Agreement. -- Providing, Customizing and Improving the Services - - Creating and managing your account or other user profiles. - - Processing orders or other fee-based transactions; billing. - - Providing you with the products, services or information you request. - - Meeting or fulfilling the reason you provided the information to us. - - Providing support and assistance for the Services. - - Improving the Services, including testing, research, internal analytics and product development. - - Doing fraud protection, security and debugging. 
- - Carrying out other business purposes stated when collecting your Personal Data or as otherwise set forth in applicable data privacy laws. -- Marketing the Services - - Marketing and selling the Services. -- Corresponding with You - - Responding to correspondence that we receive from you, contacting you when necessary or requested, and sending you information about Zed or our Services. - - Sending emails and other communications according to your preferences or that display content that we think will interest you. -- Meeting Legal Requirements and Enforcing Legal Terms - - Fulfilling our legal obligations under applicable law, regulation, court order or other legal process, such as preventing, detecting and investigating security incidents and potentially illegal or prohibited activities. - - Protecting the rights, property or safety of you, Zed or another party. - - Enforcing any agreements with you. - - Responding to claims that any posting or other content violates third-party rights. - - Resolving disputes. +### Personal Data You Provide to Us -We will not collect additional categories of Personal Data or use the Personal Data we collected for materially different, unrelated or incompatible purposes without providing you notice as is described above. +- **Contact Information** - We may collect your personal data when you inquire about Zed, our products and services, or when you otherwise interact with us, including when you sign up for, attend, or take part in our demos, events, or webinars. This data may include your full name, work email, company name, company size, and any other data you share with us. -## How We Disclose Your Personal Data +- **Communications** - When you contact us directly, we may receive personal data about you, such as your name, email address, message contents and attachments, and - if you join a live collaboration session - we process, but do not store, your audio and shared screen. 
When you sign up for news and updates, we will collect your email address and any other data you share. When you communicate with us online, our third-party vendors may receive and store these communications on our behalf. Our emails may include tracking pixels to track information about how you interact with our emails, such as whether you open them and whether you access any included links, your approximate Location Information (described below) based on your IP address, and Device Information (described below), to improve our website, products, and services. -We disclose your Personal Data to categories of service providers and other parties listed in this section. Some of these disclosures may constitute a "sale" of your Personal Data as defined under applicable laws. For more information, please refer to the state-specific sections below. +- **Account Information** - When you create an Account with Zed, we collect the data you provide to create, update, or manage your Service account. Examples include: your name, username, and email address. -- Service Providers. These parties help us provide the Services or perform business functions on our behalf. They include: - - Hosting, technology and communication providers. - - Providers of artificial intelligence or machine learning models - - Payment processors. - - If you are using our Services on a fee-basis, our payment processing partner Stripe, Inc. ("Stripe") collects your voluntarily-provided payment card information necessary to process your payment. - - Please see Stripe Terms of Service and Stripe Privacy Policy for information on its use and storage of your Personal Data. -- Analytics Partners. These parties provide analytics on web traffic or usage of the Services. They include: - - Companies that track how users found or were referred to the Services. - - Companies that track how users interact with the Services. -- Authorized authentication providers (e.g. 
GitHub OAuth) +- **Careers** - If you apply for a job with us, you may submit your contact information and your resume online. We will collect any information you choose to provide on your resume, such as your contact information, education, and employment experience. -### Fulfilling Legal Obligations +- **Payment Information** - If you make a payment, your payment details, such as credit card, address, phone number, or other financial information, are collected by our third-party payment processor on our behalf. Zed does not collect, process, or store your payment information directly. -We may share any Personal Data that we collect with third parties in relation to the activities set forth under "Meeting Legal Requirements and Enforcing Legal Terms" in the "Our Business Purposes for Collecting Personal Data" section above. +- **Regarding Third-Party Services** - If you use or integrate third-party tools or link third-party services with the Software or Service, we may receive personal data about you, such as your [GitHub username and other related information](https://docs.github.com/en/apps/oauth-apps/using-oauth-apps/connecting-with-third-party-applications) that permits us to authenticate your user identity and keep your account secure. You can learn more about Zed Third Parties here: [https://zed.dev/acceptable-use-policies](/acceptable-use-policies) -### Business Transfers +### Personal Data We Collect When You Use Our Websites, Software, or Service -Personal Data collected may be transferred to a third party if we undergo a merger, acquisition, bankruptcy or other transaction in which such third party assumes control of our business (in whole or in part). In such an event, we will make reasonable efforts to notify you before your information becomes subject to different privacy and security policies and practices as authorized or mandated by applicable law. 
+- **Website, Software, and Service Telemetry** - We automatically collect telemetry - technical logs, metrics, and usage data - to improve and support Zed’s websites, Software, and Service. You may opt out of local telemetry collection in the Software settings. However, when you sign into or use the websites or Service (including via the Software) we collect telemetry on our servers related to use of the websites and Service. -## Data that is Not Personal Data +Learn more about telemetry and your choices and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -We may create aggregated, de-identified or anonymized data from the Personal Data we collect, including by removing information that makes the data personally identifiable to a particular user. We may use such aggregated, de-identified or anonymized data and share it with third parties for our lawful business purposes, including to analyze, build and improve the Services and promote our business, provided that we will not share such data in a manner that could identify you. +- **Device and Location Information** - When you use the website, Software, or Service we may collect information about your device and software, including IP address (and inferred approximate location), device type, device identifiers, browser (type, version, user-agent, and language), and operating system or mobile device type. We do so to support improving and securing the Software and Service. Zed does not collect precise location information. -## Tracking Tools and Opt-Out +- **Usage Information** - We automatically collect information about how you use our website and Service, like the pages or other content you view and the dates and times of your visits. We do so to support improving and securing the websites, Software, and Service. 
-The Services use cookies and similar technologies such as pixel tags, web beacons, clear GIFs and JavaScript (collectively, "Cookies") to enable our servers to recognize your web browser, tell us how and when you visit and use our Services, analyze trends, learn about our user base and operate and improve our Services. Cookies are small pieces of data– usually text files – placed on your computer, tablet, phone or similar device when you use that device to access our Services. We may also supplement the information we collect from you with information received from third parties, including third parties that have placed their own Cookies on your device(s). +- **Information from Cookies and Similar Technologies** - We and our third-party partners may collect information using cookies, beacons, and similar technologies (collectively “**Cookies**”) to provide functionality and to recognize you across visits. See our [Cookie Policy](/cookie-policy), which includes information on how to control or opt out of these Cookies. -### We use the following types of Cookies: +## How We Use the Personal Data We Collect -- Essential Cookies. Essential Cookies are required for providing you with features or services that you have requested. For example, certain Cookies enable you to log into secure areas of our Services. Disabling these Cookies may make certain features and services unavailable. -- Functional Cookies. Functional Cookies are used to record your choices and settings regarding our Services, maintain your preferences over time and recognize you when you return to our Services. These Cookies help us to personalize our content for you, greet you by name and remember your preferences (for example, your choice of language or region). -- Performance/Analytical Cookies. Performance/Analytical Cookies allow us to understand how visitors use our Services. 
They do this by collecting information about the number of visitors to the Services, what pages visitors view on our Services and how long visitors are viewing pages on the Services. Performance/Analytical Cookies also help us measure the performance of our advertising campaigns to help us improve our campaigns and Services' content for those who engage with our advertising. +We use the personal data we collect: -You can decide whether or not to accept Cookies through your internet browser's settings. Most browsers have an option for turning off the Cookie feature, which will prevent your browser from accepting new Cookies, as well as (depending on the sophistication of your browser software) allow you to decide on acceptance of each new Cookie in a variety of ways. You can also delete all Cookies that are already on your device. If you do this, however, you may have to manually adjust some preferences every time you visit our website and some of the Services and functionalities may not work. +- To deliver and improve our products: Providing the Software and Service functionality you request, debugging issues, and developing new features based on usage patterns; -To find out more information about Cookies generally, including information about how to manage and delete Cookies, please visit [https://allaboutcookies.org/](https://allaboutcookies.org/) or [https://ico.org.uk/for-the-public/online/cookies/](https://ico.org.uk/for-the-public/online/cookies/) if you are located in the European Union. 
+- To communicate with you: Responding to support requests, sending service announcements, and (with your consent) marketing communications; -## Data Security +- To secure our services: Detecting and preventing fraud, abuse, and security threats; -We endeavor to protect your Personal Data from unauthorized access, use and disclosure using appropriate physical, technical, organizational and administrative security measures based on our Services,the type of Personal Data being collected and how we are processing that data. You should also help protect your data by selecting and protecting your password and/or other sign-on mechanism(s) with care; limiting access to your computer or device and browser; and signing off after you have finished accessing your account. Although we work to protect the security of your account and other data that we hold in our records, be aware that no method of transmitting data over the internet or storing data is completely secure. +- To meet legal obligations: Complying with tax, accounting, and regulatory requirements; -## Data Retention +- To process payments: Completing transactions through our payment processor; and -We retain Personal Data about you for as long as reasonably necessary to provide you with our Services or otherwise in support of our business or commercial purposes for utilization of your Personal Data, as expressed. When establishing a retention period for particular categories of data, we consider who we collected the data from, our need for the Personal Data, why we collected the Personal Data, and the sensitivity of the Personal Data. In some cases we retain Personal Data for a longer period, if doing so is necessary to comply with our legal obligations, resolve disputes or collect fees owed, or as is otherwise permitted or required by applicable law, rule or regulation. We may further retain information in an anonymous or aggregated form where such information would not identify you personally. 
+- To understand aggregate usage: Generating anonymized statistics to guide product decisions. -For example: +We do not use your personal data for purposes materially different from those described above without providing you notice and, where required by law, obtaining your consent. -- We retain your profile information and credentials for as long as you have an account with us. -- We retain your payment data for as long as we need to process your purchase or subscription. -- We retain your device/IP data for as long as we need it to ensure that our systems are working appropriately, effectively and efficiently. +## Legal Bases for Processing European Personal Data -It's worth noting that we avoid retaining data unless necessary to provide our Service. For example: +If you are located in the European Economic Area (“**EEA**”) or the United Kingdom (“**UK**”), we only process your personal data when we have a valid “legal basis,” including as set forth below. -- We do not currently store source code that we proxy during collaboration sessions. -- We do not currently store audio or video recordings of Collaboration calls handled by LiveKit. +- **Consent** - We may process your personal data where you have consented to certain processing of your personal data. For example, we may process your personal data to send you marketing communications or to use Cookies where you have consented to such use. -## Personal Data of Children +- **Contractual Necessity** - We may process your personal data where required to provide you with our products and services. For example, we may need to process your personal data to respond to your inquiries or requests. -We do not knowingly collect or solicit Personal Data from children under 13 years of age; if you are a child under the age of 13, please do not attempt to register for or otherwise use the Services or send us any Personal Data. 
If we learn we have collected Personal Data from a child under 13 years of age, we will delete that information as quickly as possible. If you believe that a child under 13 years of age may have provided Personal Data to us, please contact us at hi@zed.dev. +- **Compliance with a Legal Obligation** - We may process your personal data where we have a legal obligation to do so. For example, we may process your personal data to comply with tax, labor and accounting obligations. -## California Resident Rights +- **Legitimate Interests** - We may process your personal data where we or a third party have a legitimate interest in processing your personal data. Specifically, we have a legitimate interest in using your personal data for product development and internal analytics purposes, and otherwise to improve the safety, security, and performance of our products and services. We only rely on our or a third party’s legitimate interests to process your personal data when these interests are not overridden by your rights and interests. -If you are a California resident, you have the rights set forth in this section. Please see the "Exercising Your Rights" section below for instructions regarding how to exercise these rights. Please note that we may process Personal Data of our customers' end users or employees in connection with our provision of certain services to our customers. If we are processing your Personal Data as a service provider, you may contact the entity that collected your Personal Data in the first instance to address your rights with respect to such data as desired. +## How We Disclose the Personal Data We Collect -If there are any conflicts between this section and any other provision of this Privacy Policy and you are a California resident, the portion that is more protective of Personal Data shall control to the extent of such conflict. 
If you have any questions about this section or whether any of the following rights apply to you, please contact us at hi@zed.dev. +The disclosures described below relate to Zed’s processing as a data controller. When we process data on behalf of Zed Business customers as a data processor, some of the third-parties described below may act as sub-processors under our Data Processing Agreement. -### Access +- **Zed does not sell your personal data to third-parties**.  We also do not share your data with third-parties for the purposes of cross-context advertising. -You have the right to request certain information about our collection and use of your Personal Data over the past 12 months. In response, we will provide you with the following information: +- **Partners and Affiliates** - We may share information we receive to our current or future affiliates (companies under common ownership with Zed) for any of the lawful business purposes described in this Privacy Policy above. -- The categories of Personal Data that we have collected about you. -- The categories of sources from which that Personal Data was collected. -- The business or commercial purpose for collecting or selling your Personal Data. -- The categories of third parties with whom we have shared your Personal Data. -- The specific pieces of Personal Data that we have collected about you. -- If we have disclosed your Personal Data to any third parties for a business purpose over the past 12 months, we will identify the categories of Personal Data shared with each category of third party recipient. If we have sold your Personal Data over the past 12 months, we will identify the categories of Personal Data sold to each category of third party recipient. 
+- **Vendors and Service Providers** - We may disclose information we receive to vendors and service providers retained in connection with operating, maintaining, or monitoring our websites, products, and services for any of the lawful business purposes described in this Privacy Policy above. -### Deletion +- **AI Service Providers** - We may disclose information we receive to vendors that provide artificial intelligence services in connection with our websites, software, or services for legitimate business purposes only, including website performance monitoring and sales and marketing of our products and services. Zed does not utilize third-party services which use this information for AI training purposes. -You have the right to request that we delete the Personal Data that we have collected about you. Under the CCPA, this right is subject to certain exceptions: for e.g., we may need to retain your Personal Data to provide you with the Services or complete a transaction or other action you may have requested, or if deletion of your Personal Data involves disproportionate effort to achieve. If your deletion request is subject to one of these exceptions, we may deny your deletion request to such data. +- **Web Analytics** - We use analytics services such as Amplitude to collect and process certain analytics data related to your use of our websites. These services utilize first-party cookies to collect information about your use of our websites, apps, and online resources via HTTP referrer and/or depending on your choices regarding cookies. Zed does not use third-party tracking cookies that collect your activity for other websites. -### Correction +- **As Required By Law and Similar Disclosures** - We may access, preserve, and disclose your information if we believe doing so is required or appropriate to: -You have the right to request that we correct any inaccurate Personal Data we have collected about you. 
Under the CCPA, this right is subject to certain exceptions: for example, if we reasonably decide, based on the totality of circumstances related to your Personal Data, that such data is correct. If your correction request is subject to one of these CCPA exceptions, we may deny your request to correct such data. + - Comply with law enforcement requests and legal process, such as a court order or subpoena; + - Respond to your requests; + - Protect your, our, or others’ rights, property, security, or safety; + - Protect against legal liability; or + - Investigate fraud or other unlawful activity. -### Processing of Sensitive Personal Information Opt-Out + For the avoidance of doubt, the disclosure of your information may occur if you post any objectionable, harmful, or illegal content on or through our websites or products and services. -Consumers have certain rights over the processing of their sensitive information. However, we do not intentionally collect sensitive categories of personal information, but it is possible to share sensitive information with us through your use of the Services. It is your responsibility not to share any such sensitive information when you use the Services. +- **Merger, Sale, or Other Asset Transfers** - We may transfer your personal data to service providers, advisors, potential transactional partners, or other third parties in connection with the consideration, negotiation, or completion of a corporate transaction in which we are acquired by or merged with another company or we sell, liquidate, or transfer all or a portion of our assets. -### Personal Data Sales Opt-Out and Opt-In +- **With Your Consent** - We may also disclose your information for other purposes with your permission. -We will not sell your Personal Data, and have not done so over the last 12 months. To our knowledge, we do not sell the Personal Data of minors under 16 years of age. 
Under the CCPA, California residents have certain rights when a business "shares" Personal Data with third parties for purposes of cross-contextual behavioral advertising. We have shared the foregoing categories of Personal Data for the purposes of cross-contextual behavioral advertising, as applicable. +## Your Choices -Under California Civil Code Sections 1798.83-1798.84, California residents are entitled to contact us to prevent disclosure of Personal Data to third parties for such third parties' direct marketing purposes; in order to submit such a request, please contact us at hi@zed.dev. +- **Marketing Communications** - You can unsubscribe from our promotional emails via the link provided in the emails. Even if you opt out of receiving promotional messages from us, you will continue to receive administrative and security-related messages from us as long as you maintain a Service account. -Your browser may offer you a "Do Not Track" option, which allows you to signal to operators of websites and web applications and services that you do not wish such operators to track certain of your online activities over time and across different websites. Our Services do not support Do Not Track requests at this time. To find out more about "Do Not Track," you can visit [www.allaboutdnt.com](https://www.allaboutdnt.com). +- **Do Not Track** - Because there is no widely-accepted standard on how to respond to “Do Not Track” signals, we instead utilize and honor [Global Privacy Control (GPC)](https://globalprivacycontrol.org/#gpc-spec) as an alternative where and when feasible. 
-### Exercising Your Rights under CCPA +- **Opting-out of Software Telemetry** - Learn more about telemetry and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -To exercise the rights described in this Privacy Policy, you or, if you are a California resident, your Authorized Agent (as defined below) can send us a request that (1) provides sufficient information to allow us to adequately verify that you are the person about whom we have collected Personal Data, and (2) describes your request in sufficient detail to allow us to understand, evaluate and respond ( a "Valid Request"). We are not obligated to respond to requests that do not meet these criteria. We will only use Personal Data provided in a Valid Request to verify your identity and complete your request. +- **Disabling Image Loading for Email** - In order to prevent the use of tracking pixels, you may disable image loading in your own email client. -We are committed to respond to Valid Requests within the time frame required by applicable law. We will not charge you a fee for making a Valid Request unless your Valid Request(s) is excessive, repetitive or manifestly unfounded. If we determine that your Valid Request warrants a fee, we will notify you of the fee and explain that decision before completing your request. +## Your Privacy Rights -You may submit a Valid Request using the following methods: +Depending on where you are located, applicable data protection laws may provide you with specific rights regarding your personal data. These may include the right to: -- Email us at: hi@zed.dev +- Request access to the personal data we maintain about you, update, and correct inaccuracies in your personal data, restrict or object to the processing of your personal data, have your personal data anonymized or deleted, as appropriate, or exercise your right to data portability to easily transfer your personal data to another company. 
-If you are a California resident, you may also authorize an agent (an "Authorized Agent") to exercise your rights on your behalf. +- Withdraw any consent you previously provided to us regarding the processing of your personal data at any time and free of charge. We will apply your preferences going forward and this will not affect the lawfulness of the processing before you withdrew your consent. -### We Will Not Discriminate Against You for Exercising Your Rights +- **Your European Privacy Rights** - If you are located in the European Economic Area (EEA) or the United Kingdom (UK), you may exercise any of the rights described above under GDPR or applicable local data protection law. You also have the right to lodge a complaint with a supervisory authority, including in your country of residence, place of work, or where an incident took place. -We will not discriminate against you for exercising your rights under applicable data protection laws. We will not deny you our goods or services, charge you different prices or rates, or provide you a lower quality of goods and services if you exercise your rights under applicable law. However, we may offer different tiers of our Services, as allowed by applicable law, with varying prices, rates or levels of quality of the goods or services you receive related to the value of Personal Data that we receive from you. +### How to Exercise Your Privacy Rights -# European Union and United Kingdom Data Subject Rights +Regardless of where you are located, you may exercise these rights by contacting us at [privacy@zed.dev](mailto:privacy@zed.dev) or by using the contact details at the end of this Privacy Policy. Please include the subject line "Privacy Request" and include: (1) the specific right you wish to exercise, (2) your account email address, and (3) any details that help us locate your data. -## EU and UK Residents +Before fulfilling your request, we may ask you to provide reasonable information to verify your identity. 
Zed will respond to these requests without undue delay and in any event, within one month and will execute the request within one month of responding. Complex requests may require an additional 60 days with notice provided to you. -If you are a resident of the European Union ("EU"), United Kingdom ("UK"), Lichtenstein, Norway or Iceland, you may have additional rights under the EU or UK General Data Protection Regulation (the "GDPR") with respect to your Personal Data, as outlined below. -We use the terms "Personal Data" and "processing" as they are defined in the GDPR in this section, but "Personal Data" generally means information that can be used to individually identify a person, and "processing" generally covers actions that can be performed in connection with data such as collection, use, storage and disclosure. Company will be the controller of your Personal Data processed in connection with the Services. -If there are any conflicts between this section and any other provision of this Privacy Policy, the policy or portion that is more protective of Personal Data shall control to the extent of such conflict. If you have any questions about this section or whether any of the following applies to you, please contact us at hi@zed.dev. Note that we may also process Personal Data of our customers' end users or employees in connection with our provision of certain services to you, in which case we are the processor of Personal Data. If we are the processor of your Personal Data, please contact the controller party in the first instance to address your rights with respect to such data. 
+Please note that there are exceptions and limitations to each of these rights, and that while any changes you make will be reflected in active user databases instantly or within a reasonable period of time, we may retain personal data for backups, archiving, prevention of fraud and abuse, satisfaction of legal obligations, or where we otherwise reasonably believe that we have a legitimate and lawful reason to do so. -## Personal Data We Collect +## Third Parties -The "Categories of Personal Data We Collect" section above details the Personal Data that we collect from you. +Our websites, products, and services may contain links to other websites, products, or services that we do not own or operate, or may permit you to integrate with third-party services. We are not responsible for the privacy or security practices of these third parties. Please be aware that this Privacy Policy does not apply to your activities on these third-party services or any data you disclose to these third parties. We encourage you to read their privacy policies before providing any data to them. -## Personal Data Use and Processing Grounds +## Retention -The "Our Commercial or Business Purposes for Collecting Personal Data" section above explains how we use your Personal Data. +We keep personal data as long as necessary to provide, maintain, and secure our websites, products, and services. We take measures to avoid retaining data we don't need - for example, we don't store source code proxied during collaboration sessions, or audio, video, and screen contents from calls. -We will only process your Personal Data if we have a lawful basis for doing so. Lawful bases for processing include consent, contractual necessity and our "legitimate interests" or the legitimate interest of others, as further described below. +When you request deletion, we take measures to delete your personal data or anonymize it, unless we're legally required to retain it. 
We determine retention periods based on the type of service, our relationship with you, legal requirements, and applicable statutes of limitations. -- Contractual Necessity: We process the following categories of Personal Data as a matter of "contractual necessity", meaning that we need to process the data to perform under our End User Terms with you, which enables us to provide you with the Services. When we process data due to contractual necessity, failure to provide such Personal Data will result in your inability to use some or all portions of the Services that require such data. - - Profile or Contact Data - - Payment Data -- Legitimate Interest: We process the following categories of Personal Data when we believe it furthers the legitimate interest of us or third parties: - - Device/IP Data - - Web Analytics - - We may also de-identify or anonymize Personal Data to further our legitimate interests. -- Examples of these legitimate interests include (as described in more detail above): - - Providing, customizing and improving the Services. - - Marketing the Services. - - Corresponding with you. - - Meeting legal requirements and enforcing legal terms. - - Completing corporate transactions. -- Consent: In some cases, we process Personal Data based on the consent you expressly grant to us at the time we collect such data. - - Other Processing Grounds: From time to time we may also need to process Personal Data to comply with a legal obligation, if it is necessary to protect the interests of you or other data subjects, or if it is necessary in the public interest. +## Security -## Sharing Personal Data +Designing Zed and our Service with “secure-by-default” as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best-practice in order to uphold that principle.  
To learn more about Zed’s security program, please visit [https://zed.dev/docs/ai/privacy-and-security](https://zed.dev/docs/ai/privacy-and-security). -The "How We Share Your Personal Data" section above details how we share your Personal Data with third parties. +Zed will notify users as soon as possible should an incident affect their security or privacy. However, because no electronic transmission or storage of data can be proven entirely secure, we can make no guarantees as to the security or privacy of your data. -## EU Data Subject Rights +## Children’s Privacy -For more information about these EU or UK personal data terms and your rights related thereto, or to submit a request for information, please email us at hi@zed.dev. Please note that in some circumstances, we may not be able to fully comply with your request, such as if it is frivolous or impractical, if it jeopardizes the rights of others, or if it is not required by law, but, in those circumstances, we are committed to respond to notify you of such a decision regardless. In some cases, we may also need you to provide us with additional information, which may include Personal Data, if necessary to verify your identity and the nature of your request. +We do not knowingly collect, maintain, or use personal data from children under 18 years of age, and no part of our websites, products, or services is directed to children. If you learn that a child has provided us with personal data in violation of this Privacy Policy, alert us at [privacy@zed.dev](mailto:privacy@zed.dev). -- Access: You can request more information about the Personal Data we hold about you and request a copy of such Personal Data. You can also access certain of your Personal Data by logging on to your account. -- Rectification: If you believe that any Personal Data we are holding about you is incorrect or incomplete, you can request that we correct or supplement such data. 
You can also correct some of this information directly by logging on to your account. -- Erasure: You can request that we erase some or all of your Personal Data from our systems. -- Withdrawal of Consent: If we are processing your Personal Data based on your consent, you have the right to withdraw your consent at any time. Please note, however, that if you exercise this right, you may have to then provide express consent on a case-by-case basis for the use or disclosure of certain of your Personal Data, if such use or disclosure is necessary to enable you to utilize some or all of our Services. -- Portability: You can ask for a copy of your Personal Data in a machine-readable format. You can also request that we transmit the data to another controller where technically feasible. -- Objection: You can contact us to let us know that you object to the further use or disclosure of your Personal Data for certain purposes, such as for direct marketing purposes. -- Restriction of Processing: You can ask us to restrict further processing of your Personal Data. -- Right to File Complaint: You have the right to lodge a complaint about Company's practices with respect to your Personal Data with the supervisory authority of your country or EU Member State. A list of Supervisory Authorities is available here: [https://edpb.europa.eu/about-edpb/board/members_en](https://edpb.europa.eu/about-edpb/board/members_en) +## International Visitors -## Transfers of Personal Data +Our websites, products, and services are hosted in the United States (“**U.S.**”). If you choose to use our websites or products and services from the EEA, the UK or other regions of the world with laws governing data collection and use that may differ from U.S. law, then please note that you are transferring your personal data outside of those regions to the U.S. for storage and processing. We may transfer personal data from the EEA or the UK to the U.S. 
and other third countries based on European Commission-approved or UK Government-approved Standard Contractual Clauses, or otherwise in accordance with applicable data protection laws. We may also transfer your data from the U.S. to other countries or regions in connection with storage and processing of data, fulfilling your requests, and operating our websites, products, and services. By providing any data, including personal data, on or to the websites, products, or services, you consent to such transfer, storage, and processing. For more information about the tools that we use to transfer personal data, or to obtain a copy of the contractual safeguards we use for such transfers (if applicable), you may contact us as described below. -The Services are hosted and operated in the United States ("U.S.") through Company and its service providers. By using the Services, you acknowledge that any Personal Data about you is being provided to Company in the U.S. and will be hosted on U.S. servers, and you authorize Company to transfer, store and process your information to and in the U.S., and possibly other countries. In some circumstances, your Personal Data may be transferred to the U.S. pursuant to a data processing agreement incorporating legally required data protection clauses. +## Changes to this Privacy Policy -# Contact Information: +We will post any adjustments to the Privacy Policy on this page, and the revised version will be effective when it is posted. Registered customers will be notified of material privacy policy changes via the email on file with Zed. -If you have additional questions about this Privacy Policy, the methods in which we collect and use your Personal Data or your choices and rights regarding such collection and use, please do not hesitate to contact us at: +## Contact Information -- Website: zed.dev -- Email Address: hi@zed.dev -- Corporate Address: - Zed Industries, Inc. 
- 2590 Welton St - Suite 200 - PO Box 1916 - Denver CO 80205 +When data is used as outlined in this Privacy Policy, Zed is the data controller and responsible for the processing of your personal data. When Zed processes personal data on behalf of Zed Business customers as a data processor, the terms of our Data Processing Agreement apply. If you have any questions, comments, or concerns about our processing activities, please email us at [privacy@zed.dev](mailto:privacy@zed.dev) or write to us at: -**DATE: May 6, 2025** +Zed Industries, Inc. +2590 Welton St +Suite 200, PO Box 1916 +Denver, CO 80205 diff --git a/legal/subprocessors.md b/legal/subprocessors.md index df3a5f7c9fd1ff5d3fb309a58d58700f8a08681a..7bd95e888473e66e0f9eb232bef1d3e7d67fb802 100644 --- a/legal/subprocessors.md +++ b/legal/subprocessors.md @@ -3,24 +3,100 @@ title: Subprocessor List slug: subprocessors --- -This page provides information about the Subprocessors Zed has engaged to provide processing activities on Customer Data as defined in the [Zed End User Terms](https://zed.dev/terms). 
- -| Subprocessor | Purpose | Location | -| ------------------- | ------------------------ | ------------- | -| Cloudflare | Cloud Infrastructure | Worldwide | -| Amazon Web Services | Cloud Infrastructure | United States | -| DigitalOcean | Cloud Infrastructure | United States | -| Vercel | Cloud Infrastructure | United States | -| ConvertKit | Email Marketing | United States | -| Axiom | Analytics | United States | -| Hex Technologies | Analytics | United States | -| Snowflake | Analytics | United States | -| LiveKit | Audio/Video Conferencing | United States | -| GitHub | Authentication | United States | -| Anthropic | AI Services | United States | -| BaseTen | AI Services | United States | -| Exa Labs | AI Services | United States | -| Google | AI Services | United States | -| OpenAI | AI Services | United States | - -**DATE: May 6th, 2025** +Zed uses select third-party subprocessors to deliver core product functionality. Each subprocessor processes customer personal data only as necessary to provide its service, and all are subject to appropriate data protection agreements. + +### How Zed Uses Subprocessors + +To provide fast, reliable, and secure functionality, Zed relies on a small number of carefully vetted third-party subprocessors. These vendors help us deliver essential capabilities such as hosting, billing, analytics, real-time collaboration, and hosted AI features. + +Each subprocessor only processes customer personal data as needed to provide its service. + +Zed maintains contracts and data protection agreements with all subprocessors, including GDPR-compliant terms where applicable. We do not sell customer data, and we do not share customer personal data with vendors for advertising or marketing purposes. + +### AI Subprocessors + +Zed offers three modes for AI: + +1. **Bring your own API key** — data goes directly from the customer to the model provider; Zed does not process or store it. +2. 
[**External Agents**](https://zed.dev/docs/ai/external-agents) — Zed uses ACP to provide an enhanced experience with terminal-based AI code agents like Claude Code or OpenAI Codex. Data is not processed or stored by Zed when using external agents. +3. **Zed-hosted models** — Zed sends customer prompts to one of its AI providers (listed below). These vendors act as subprocessors only for customers who choose this mode. + +### Ongoing Updates + +**Last Updated**: March 2, 2026 + +This subprocessor list is reviewed regularly. Zed will notify customers of material changes in accordance with our [Terms](https://zed.dev/terms) and [Privacy Policy](https://zed.dev/privacy-policy). + +--- + +## Infrastructure & Hosting + +| Subprocessor | Purpose | Data Location | +| ----------------------- | ---------------------------------------- | ------------- | +| **Cloudflare** | Network services, Cloudflare Workers | Global | +| **Amazon Web Services** | Telemetry ingestion pipeline, S3 buckets | United States | +| **DigitalOcean** | Application database hosting | United States | +| **Vercel** | Website and edge infrastructure hosting | United States | + +--- + +## Billing & Payments + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------------------------------ | ------------- | +| **Stripe** | Payment processing | United States | +| **Orb** | Usage tracking, subscription management, and metered billing | United States | + +--- + +## Operational Tools + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------- | ------------- | +| **Day.ai** | Customer relationship management | United States | +| **Linear** | Issue tracking and project management | United States | + +--- + +## Email & Communication + +| Subprocessor | Purpose | Data Location | +| -------------- | ---------------------------------------------------------- | ------------- | +| **ConvertKit** | Product update and feature 
announcement emails | United States | +| **Loops** | Email marketing and product communications | United States | +| **Plain** | Consolidated platform for end-user support across channels | United States | + +--- + +## Analytics & Data Processing + +| Subprocessor | Purpose | Data Location | +| -------------------- | ---------------------------------------------------------------------------------------- | ------------- | +| **Amplitude** | Product analytics | United States | +| **Axiom** | Application telemetry, observability, and logs | United States | +| **Fivetran** | Automates data pipeline integration (extract, transformation, and load services) for Zed | United States | +| **Hex Technologies** | Analytics and debugging | United States | +| **Snowflake** | Data warehouse | United States | + +--- + +## Collaboration Services + +| Subprocessor | Purpose | Data Location | +| ------------ | -------------------------------------------------------------- | ------------- | +| **LiveKit** | Real-time audio/video and collaborative session infrastructure | United States | + +--- + +## AI Services (Zed-Hosted Models) + +_These subprocessors apply only when customers opt to use Zed's hosted AI models. When users supply their own API keys, or use external agents, data is sent directly to the provider and does not pass through Zed's infrastructure._ + +| Subprocessor | Purpose | Data Location | +| ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| **Anthropic** | Requests may be sent to Anthropic even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with Anthropic. 
| United States | +| **Baseten** | Inference infrastructure for Edit Predictions | United States | +| **Exa Labs** | AI-powered contextual search and retrieval | United States | +| **Google (Vertex)** | Requests may be sent to Google even if you have another provider's model selected in chat (e.g. for summarization). We have a zero data retention agreement with Google. | United States | +| **OpenAI** | Requests may be sent to OpenAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with OpenAI. | United States | +| **xAI** | Requests may be sent to xAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with xAI. | United States | diff --git a/legal/terms.md b/legal/terms.md index 88afa36aa9cb17c55b1b2fe50a26893c4e5a3389..ed90fd36c835ddcc0949a3ad0d49e35fb7e79c8a 100644 --- a/legal/terms.md +++ b/legal/terms.md @@ -1,197 +1,254 @@ --- -title: Zed End User Terms +title: Terms of Service slug: terms --- -PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS. +**Last Updated**: March 2, 2026 -## 1. 
ACCESS TO AND USE OF THE SOLUTION +Welcome, and thank you for your interest in Zed Industries, Inc. (“**Zed**,” “**we**,” or “**us**”) and our website at [www.zed.dev](https://www.zed.dev), along with our downloadable Zed software (the “**Software**”) and related subscription service (the “**Service**”). These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +Please read the following Terms carefully. -## 2. TERMS APPLICABLE TO THE EDITOR +**By accessing or using the Service, you (“You” or “Customer”) agree to these Terms of Service, the Data Processing Addendum (“DPA”), available upon request, and Zed’s [Privacy Policy](/privacy-policy) (collectively, the “Terms”).** -### 2.1. License Grant +If you are not eligible, or do not agree to the Terms, you may not access or use the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed. -### 2.2. License Limitations +**ARBITRATION NOTICE**. 
Except for certain kinds of disputes described in Section 15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING. ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a). -You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof. +## 1. Overview -### 2.3. Open Source Software +Subject to these Terms, Zed will permit Customer to access and use Zed’s AI-enabled software-as-a-service offering (the “**Service**”), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed’s open source code editing software (“**Software**”). -Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: [https://github.com/zed-industries/zed](https://github.com/zed-industries/zed) (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo. +## 2. Service -## 3. TERMS APPLICABLE TO THE ZED SERVICE +### 2.1. Eligibility -### 3.1. Access to and Scope of Zed Service +Customer must be at least 18 years old to use the Service. 
By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer’s registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer’s behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms. -If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement. +### 2.2. Access Grant -### 3.2. Restrictions +During the Term, subject to Customer’s compliance with these Terms, Customer may access and use the Service only for Customer’s internal business purposes or, for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed’s usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (“**Documentation**”), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer agrees to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer’s account as though such acts and omissions were Customer’s own. 
Customer will promptly notify Zed if it becomes aware of any compromise to its Zed account. -You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement. +### 2.3. Acceptable Use -### 3.3. Customer Data +The Service uses technology provided by multiple third-party AI subprocessors (the “AI Providers”), including but not limited to Anthropic PBC (“Anthropic”), Google LLC (“Google”), LiveKit Incorporated, and OpenAI, LLC (“OpenAI”), as such list may be updated from time to time. 
Customer may not use the Service in a manner that violates any applicable AI Provider policies, which are listed on [https://zed.dev/acceptable-use-policies](https://zed.dev/acceptable-use-policies), including Anthropic’s [Usage Policy](https://www.anthropic.com/legal/aup), Google Gemini’s [Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy), GitHub's [Acceptable Use Policy](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies), LiveKit’s [Acceptable Use Policy](https://livekit.io/legal/acceptable-use-policy); OpenAI’s [Usage Policies](https://openai.com/policies/usage-policies/) or [Sharing and Publication Policy](https://openai.com/api/policies/sharing-publication/); and [Community Guidelines](https://openai.com/api/policies/community-guidelines/); each of which may be updated from time to time and is expressly incorporated by reference. Customer is solely responsible for checking for updates to the applicable AI Provider policies from time to time. -You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed. +### 2.4. Restrictions -#### 3.3.1. 
Customer Data Made Available to Zed +Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b) seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others; (f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, “**Laws**”), or the Terms. The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at [https://zed.dev/acceptable-use-policies](/acceptable-use-policies) (as may be updated from time to time). Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("**Open Source Component**"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0. 
To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request. -To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein: +## 3. General Payment Terms -#### 3.3.2. Usage Data +Accessing certain features and tiers of the Service requires Customer to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law. -To improve the Editor and understand how You use it, Zed optionally collects the following usage data: +### 3.1. Price -- (a) file extensions of opened files; -- (b) features and tools You use within the Editor; -- (c) project statistics (e.g., number of files); and -- (d) frameworks detected in Your projects +Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at [https://zed.dev/pricing](https://zed.dev/pricing) up to date. Zed encourages Customer to check Zed’s pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply. -(a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. 
You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See [https://zed.dev/docs/telemetry](https://zed.dev/docs/telemetry) for more. +### 3.2. Taxes -Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection. +Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed’s income tax (“**Taxes**”). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction. -#### 3.3.3. Crash Reports +### 3.3. Authorization -Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. 
You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely. +Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer’s submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer’s account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer’s credit card account prior to Customer’s purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer’s purchase. -#### 3.3.4. User Content +### 3.4. Subscription Service -• You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. Such data and information may include, but is not limited to any of the following: +The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("**Subscription Service**"). The "**Subscription Billing Date**" is the date when Customer purchases its first subscription to the Service. The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "**Initial Subscription Period**"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "**Subscription Period**") unless Customer cancels the Subscription Service or Zed terminates it. 
If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed’s pricing page at [https://zed.dev/pricing](https://zed.dev/pricing). Customer’s account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer’s account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer’s account or that Customer otherwise provides to Zed. Customer may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at [billing-support@zed.dev](mailto:billing-support@zed.dev). **YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.** -- (a) file contents and associated metadata (e.g., filename, paths, size, timestamps); -- (b) source control history, comments and metadata (e.g., git history, commit messages); -- (c) configuration data (e.g., settings, keymaps); -- (d) anything typed, pasted and/or displayed on screen while using the Editor; -- (e) derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches); -- (f) metadata, code and other derivative works of the above returned by language servers and other local tooling; and -- (g) metadata, code and other derivative works of the above returned by services integrated with the Zed Editor +### 3.5. Consumption Fees -(a-g collectively, "User Content"). 
+Customer’s subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at [https://zed.dev/docs/ai/models](https://zed.dev/docs/ai/models). -#### 3.3.5. Handling of User Content +### 3.6. Delinquent Accounts -Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law. +Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer. -#### 3.3.5.1. Zed Collaboration Services +## 4. Data -When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. 
In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution. +### 4.1. Zed's Use of Customer Data -#### 3.3.5.2. Other Services +Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer’s designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, store, host, or otherwise process (“**Process**”) any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, “**Customer Data**”) solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer’s designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent. -The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. 
You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms. +### 4.2. Customer's Ownership of Output -#### 3.3.5.3. Zed AI Services +The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, “**Output**”), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. **For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed’s specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.** -The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the “Output”). 
Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content. +### 4.3. Zed's Collection of Output Rating -Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk. +The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (“Output Rating”). For clarity, Customer’s decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes. -#### 3.3.5.4. Improvement Feedback +### 4.4. Telemetry -When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time. 
+Zed may collect, generate, and Process information, including technical logs, metrics, and data and learnings, related to the Software and Service (“**Telemetry**”) to improve and support the Services and for other lawful business purposes. Customer may configure the Software to opt out of the collection of certain Telemetry Processed locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed’s servers. Zed may not disclose Telemetry to any third-party other than Zed’s Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. **For avoidance of doubt, Telemetry expressly does not include Customer Data.** -For more information on Zed Edit Predictions please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 5. Customer Obligations -When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the “Agent Improvement Feedback”) with Zed, and Zed may use the same to improve the Agent Panel and related Output. Zed will only collect Agent Improvement Feedback when You elect to share the same. +Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any “protected health information” or “PHI” as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). 
Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer’s breach of this Section. Zed disclaims any and all liability in connection with Customer Data. -For more information regarding the Agent Panel please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 6. Suspension of Service -#### 3.4. Privacy Policy +Zed may immediately suspend Customer’s access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b) any payments required under the Terms are overdue by 30 days or more; (c) changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer’s breach of the Terms risks harm to any of Zed’s other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer’s access to the Service. -You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: [https://zed.dev/privacy-policy](https://zed.dev/privacy-policy). +## 7. Data Sharing and Third-Party Integrations -## 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS +### 7.1. Collaboration Services -### 4.1. Fee Based Services +Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (“**Collaboration Features**”). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data). Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session. -The Zed AI Services is made available with additional usage benefits (the “Enhanced Use ”) as described in the table published at [zed.dev/pricing](https://zed.dev/pricing) (the “Pricing Table”), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account. +### 7.2. Third-Party Integrations -### 4.2. Fees +The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (“**Third-Party Platforms**”). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer’s agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer’s behalf. -Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the “Fees”). Customer shall have no right of return, and all Fees shall be non-refundable. +## 8. 
Disclaimers; No Warranties by Zed -### 4.3. Payment Terms +THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED “AS IS” AND “AS AVAILABLE”. ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. ZED DOES NOT WARRANT THAT CUSTOMER’S USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED’S CONTROL. ZED IS NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER’S DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer acknowledges and agrees that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is responsible for independently evaluating the Output and any other information Customer receives from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output). -All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table. +THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION 8 (DISCLAIMERS; NO WARRANTIES BY ZED) APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law. -### 4.4. Taxes; Set-offs +## 9. 
Term, Termination, and Modification of the Service -Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax. +### 9.1. Term -## 5. TERM AND TERMINATION +These Terms are effective beginning when Customer accepts the Terms or first downloads, installs, accesses, or uses the Service, and ending when terminated as described in Section 9.2 (Termination). -### 5.1. Term +### 9.2. Termination -The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term"). +If Customer violates any provision of these Terms, then Customer is not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer’s account on the Service, or suspend or terminate Customer’s access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer arising from such termination. Customer may terminate its account and these Terms at any time by contacting Zed at [hi@zed.dev](mailto:hi@zed.dev). -### 5.2. Termination +### 9.3. 
Effect of Termination -This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time. +Upon termination of these Terms: a) Customer’s license to access and use the Service will terminate and Customer must immediately cease all use of the Service; b) Customer will no longer be authorized to access its account or the Service; c) Customer must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s) 2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2 (Customer’s Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15 (Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer’s account has been terminated for a breach of these Terms, then Customer is prohibited from creating a new account on the Service. -### 5.3. Effect of Termination and Survival +### 9.4. Modification of the Service -Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous). 
+Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer for any change to the Service. -## 6. OWNERSHIP +## 10. Ownership; Feedback -Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the “Output”) are transferred or assigned to Zed hereunder. +Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. 
To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed’s products, services, or other technology (“**Feedback**”), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer. -## 7. INDEMNIFICATION +## 11. Limitations of Liability -Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data. +### 11.1. -## 8. WARRANTY +TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE. 
-Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS. +### 11.2. -## 9. LIMITATIONS OF LIABILITY +TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF:  THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100. -IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000). +### 11.3. -## 10. 
Third Party Services +EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. THE LIMITATIONS IN THIS SECTION 11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE. -Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: [https://zed.dev/third-party-terms](https://zed.dev/third-party-terms) and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service. +## 12. Indemnity -## 11. 
MISCELLANEOUS +To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer’s unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer’s use of Output; (4) Customer’s violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer’s violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed’s own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer’s indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims. -### 11.1. Export Control +## 13. Confidentiality -You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws. +### 13.1. Definition -### 11.2. 
Compliance with Laws +“**Confidential Information**” means information disclosed to the receiving Party (“**Recipient**”) under the Terms that is designated by the disclosing Party (“**Discloser**”) as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed’s Confidential Information includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service). -You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees. +### 13.2. Obligations -### 11.3. Assignment +As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. 
Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (“**Representatives**”), provided Recipient remains responsible for its respective Representatives’ compliance with this Section 13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13. -Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets. +### 13.3. Exclusions -### 11.4. Force Majeure +These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information. -Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may to terminate this agreement by written notice to the other party. +### 13.4. Remedies -### 11.5. 
Notice +Unauthorized use or disclosure of Confidential Information may cause substantial harm for which damages alone are an insufficient remedy. Discloser may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the  necessity of posting a bond or proving actual damages. -All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service. +### 13.5. Required Disclosures -### 11.6. No Agency +Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment. -Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. You do not have any authority of any kind to bind Zed. +## 14. Publicity -### 11.7. Governing Law +Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party’s prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer’s parent company) for the purpose of identifying Customer as a licensee or customer on Zed’s website or in other promotional materials. Zed will cease further use at Customer’s written request. -This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. 
Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement. +## 15. Governing Law, Dispute Resolution and Arbitration -### 11.8. Updated Agreement +### 15.1. Governing Law, Jurisdiction and Venue -Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data. +The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waives any objection based on venue or forum non conveniens. 
ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES; OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED. -### 11.9. Entire Agreement +### 15.2. Dispute Resolution and Arbitration -This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected. +ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE. -**DATE: May 6, 2025** +**a. 
Opt-Out.** If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 15.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to [arbitration-opt-out@zed.dev](mailto:arbitration-opt-out@zed.dev) or a letter to Zed Industries, Inc., Attention: Legal Department – Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer’s full legal name, the email address associated with Customer’s account on the Service, and a statement that Customer wishes to opt out of arbitration (“**Opt-Out Notice**”). Once Zed receives Customer’s Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer’s Opt-Out Notice. + +**b. Pre-Arbitration Dispute Resolution and Notification.** Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at [legal@zed.dev](mailto:legal@zed.dev). If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration. + +**c. 
Exceptions to Arbitration.** Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party’s copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack). + +**d. Arbitration Rules.** (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof. (2) If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules. + +**e. Modification to AAA Rules - Arbitration Hearing/Location.** Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer’s headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator. + +### 15.3. Waiver of Jury Trial and Class Action Waiver + +EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. 
CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION. + +## 16. General Terms + +### 16.1. + +These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed’s prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed’s right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section headers in these Terms are for convenience only and will not have any impact on the interpretation of any provision. Throughout these Terms the use of the word “including” means “including but not limited to.” If any part of these Terms are held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect. + +### 16.2. Notices + +Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev) and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. 
mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer by email or through the Service. + +### 16.3. DPA + +The terms of the Data Processing Agreement (“**DPA**”), available upon request, are incorporated into these Terms by reference. + +### 16.4. Modification of Terms + +Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer accept modified Terms in order to continue to use the Service. If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms. + +### 16.5. Subcontractors + +Zed may use subcontractors and permit them to exercise Zed’s rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms. + +### 16.6. Independent Contractors + +The Parties are independent contractors, not agents, partners, or joint venturers. + +### 16.7. Export + +Customer will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. 
government embargo or that has been designated by the U.S. government as a “terrorist supporting” country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations. + +### 16.8. Government End-Users + +Elements of the Service may include commercial computer software. If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited. + +### 16.9. Privacy Policy + +Please read the [Zed Privacy Policy](/privacy-policy) (the “**Privacy Policy**”) carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms. + +### 16.10. Additional Terms + +Customer’s use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the “**Additional Terms**”). All Additional Terms are incorporated by this reference into, and made a part of, these Terms. + +### 16.11. Consent to Electronic Communications + +By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed’s electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself. + +### 16.12. Contact Information + +The Service is offered by Zed Industries, Inc. Customer may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev). + +### 16.13. Notice to California Residents + +If Customer is a California resident, then under California Civil Code Section 1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service. diff --git a/legal/third-party-terms.md b/legal/third-party-terms.md index 4c4a0f6cce319369283c42d68f150699f9c1565c..6d4153d0b4771a5ccb9cca924caae682eece145c 100644 --- a/legal/third-party-terms.md +++ b/legal/third-party-terms.md @@ -1,53 +1,39 @@ --- -title: 3rd Party Terms -slug: third-party-terms +title: Acceptable Use Policies +slug: acceptable-use-policies --- -In addition to the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy) usage of certain Zed features may also subject you to additional 3rd party terms and conditions. 
These terms and conditions may include, but are not limited to, the following: +**Last Updated:** March 2, 2026 -## Anthropic - -- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) -- [Anthropic Privacy Policy](https://www.anthropic.com/legal/privacy) -- [Anthropic Commercial Terms of Service](https://www.anthropic.com/legal/commercial-terms) +Some third-party services accessible through Zed have their own acceptable use policies. These apply whether Zed hosts the service on your behalf or you connect your own account. The applicable policies are listed below and apply alongside the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy). -## Baseten +These policies may be updated from time to time by the applicable provider. -- [BaseTen Terms and Conditions](https://www.baseten.co/terms-and-conditions/) - -### Exa.ai +## Anthropic -- [Exa Labs Terms and Conditions](https://exa.ai/assets/Exa_Labs_Terms_of_Service.pdf) -- [Exa Labs Privacy Policy](https://exa.ai/privacy-policy) +- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) ## GitHub -- [GitHub Terms of Service](https://docs.github.com/en/site-policy/github-terms/github-terms-of-service) -- [GitHub Privacy Statement](https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement) - [GitHub Acceptable Use Policies](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies) - [GitHub Copilot Product Specific Terms](https://github.com/customer-terms/github-copilot-product-specific-terms) ## Google -- [Google APIs Terms of Service](https://developers.google.com/terms) -- [Google Gemini API Additional Terms of Service](https://ai.google.dev/gemini-api/terms) - [Google Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy) -## LiveKit +## OpenAI + +- [OpenAI Usage Policies](https://openai.com/policies/usage-policies/) -- [LiveKit Terms of 
Service](https://livekit.io/legal/terms-of-service) -- [LiveKit Privacy Policy](https://livekit.io/legal/privacy-policy) +## OpenRouter -## OpenAI +- [OpenRouter Terms of Service](https://openrouter.ai/terms) -- [OpenAI Terms of Use](https://openai.com/policies/terms-of-use/) -- [OpenAI Privacy Policy](https://openai.com/policies/privacy-policy/) -- [OpenAI Business terms](https://openai.com/policies/business-terms/) -- [OpenAI Service terms](https://openai.com/policies/service-terms/) +## Vercel -## SuperMaven +- [Vercel Acceptable Use Policy](https://vercel.com/legal/acceptable-use-policy) -- [SuperMaven Terms of Service](https://supermaven.com/terms-of-service) -- [SuperMaven Privacy Policy](https://supermaven.com/privacy-policy) +## xAI -**DATE: May 6, 2025** +- [xAI Acceptable Use Policy](https://x.ai/legal/acceptable-use-policy) diff --git a/nix/build.nix b/nix/build.nix index 28031337da6877cebda056e9cf2eab0f8f0d3ff7..68f8a4acdbe83f7e8981659dd0376ec87ef52dfe 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -1,4 +1,6 @@ { + pkgs, + system, lib, stdenv, @@ -24,10 +26,18 @@ fontconfig, freetype, git, + glib, + libdrm, + libgbm, libgit2, libglvnd, + libva, + libxcomposite, + libxdamage, + libxext, + libxfixes, libxkbcommon, - livekit-libwebrtc, + libxrandr, nodejs_22, openssl, perl, @@ -161,11 +171,21 @@ let ] ++ lib.optionals stdenv'.hostPlatform.isLinux [ alsa-lib + glib + libva libxkbcommon wayland gpu-lib xorg.libX11 xorg.libxcb + libdrm + libgbm + libva + libxcomposite + libxdamage + libxext + libxfixes + libxrandr ] ++ lib.optionals stdenv'.hostPlatform.isDarwin [ apple-sdk_15 @@ -200,7 +220,7 @@ let }; ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. 
Auto-updates have thus been disabled."; RELEASE_VERSION = version; - LK_CUSTOM_WEBRTC = livekit-libwebrtc; + LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { }; PROTOC = "${protobuf}/bin/protoc"; CARGO_PROFILE = profile; @@ -213,6 +233,7 @@ let lib.makeLibraryPath [ gpu-lib wayland + libva ] }"; @@ -244,6 +265,16 @@ let postPatch = '' substituteInPlace webrtc-sys/build.rs --replace-fail \ "cargo:rustc-link-lib=static=webrtc" "cargo:rustc-link-lib=dylib=webrtc" + + substituteInPlace webrtc-sys/build.rs --replace-fail \ + 'add_gio_headers(&mut builder);' \ + 'for lib_name in ["glib-2.0", "gio-2.0"] { + if let Ok(lib) = pkg_config::Config::new().cargo_metadata(false).probe(lib_name) { + for path in lib.include_paths { + builder.include(&path); + } + } + }' '' + lib.optionalString withGLES '' cat ${glesConfig} >> .cargo/config/config.toml diff --git a/nix/livekit-libwebrtc/0001-shared-libraries.patch b/nix/livekit-libwebrtc/0001-shared-libraries.patch new file mode 100644 index 0000000000000000000000000000000000000000..2a7fcf0cbdd519d51d9df446d5b9db00b22d521e --- /dev/null +++ b/nix/livekit-libwebrtc/0001-shared-libraries.patch @@ -0,0 +1,17 @@ +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -143,8 +143,12 @@ + # target_defaults and direct_dependent_settings. 
+ config("common_inherited_config") { + defines = [ "PROTOBUF_ENABLE_DEBUG_LOGGING_MAY_LEAK_PII=0" ] +- cflags = [] +- ldflags = [] ++ cflags = [ "-fvisibility=default" ] ++ ldflags = [ "-lavutil", "-lavformat", "-lavcodec" ] ++ ++ if (is_linux) { ++ ldflags += [ "-Wl,--version-script=" + rebase_path("//libwebrtc.version", root_build_dir) ] ++ } + + if (rtc_objc_prefix != "") { + defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ] diff --git a/nix/livekit-libwebrtc/README.md b/nix/livekit-libwebrtc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..87d4fc5599fa0a3b50f853ad53f19e90c5c2121e --- /dev/null +++ b/nix/livekit-libwebrtc/README.md @@ -0,0 +1,7 @@ +# Vendored livekit-libwebrtc build + +The contents of this directory is vendored from [this nixpkgs +PR](https://github.com/NixOS/nixpkgs/pull/478907). + +It should be removed as soon as said PR is merged and the new version of libwebrtc hits +nixpkgs-unstable. diff --git a/nix/livekit-libwebrtc/chromium-129-rust.patch b/nix/livekit-libwebrtc/chromium-129-rust.patch new file mode 100644 index 0000000000000000000000000000000000000000..1fe0c7f87324d8a046ae5226ccfbb06aa42d30b1 --- /dev/null +++ b/nix/livekit-libwebrtc/chromium-129-rust.patch @@ -0,0 +1,21 @@ +diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn +index 45086d6838cac..81132ad8ecb31 100644 +--- a/build/config/compiler/BUILD.gn ++++ b/build/config/compiler/BUILD.gn +@@ -1727,16 +1727,6 @@ config("runtime_library") { + configs += [ "//build/config/c++:runtime_library" ] + } + +- # Rust and C++ both provide intrinsics for LLVM to call for math operations. We +- # want to use the C++ intrinsics, not the ones in the Rust compiler_builtins +- # library. The Rust symbols are marked as weak, so that they can be replaced by +- # the C++ symbols. 
This config ensures the C++ symbols exist and are strong in +- # order to cause that replacement to occur by explicitly linking in clang's +- # compiler-rt library. +- if (is_clang && !is_nacl && !is_cronet_build) { +- configs += [ "//build/config/clang:compiler_builtins" ] +- } +- + # TODO(crbug.com/40570904): Come up with a better name for is POSIX + Fuchsia + # configuration. + if (is_posix || is_fuchsia) { diff --git a/nix/livekit-libwebrtc/libwebrtc.version b/nix/livekit-libwebrtc/libwebrtc.version new file mode 100644 index 0000000000000000000000000000000000000000..abf9d5b9df61640d4775e2e1aeea6f113954a944 --- /dev/null +++ b/nix/livekit-libwebrtc/libwebrtc.version @@ -0,0 +1,22 @@ +/* Linker version script for libwebrtc.so (Linux only). + * + * When libwebrtc.so is built with rtc_use_pipewire=true and + * -fvisibility=default, PipeWire lazy-load trampoline stubs (pw_*, spa_*) + * are exported as weak symbols. If the PipeWire ALSA plugin + * (libasound_module_pcm_pipewire.so) is later dlopen'd by libasound, + * the dynamic linker may resolve the plugin's pw_* references through + * libwebrtc.so's broken trampolines instead of the real libpipewire.so, + * causing a SIGSEGV (NULL function pointer dereference). + * + * This script hides only those third-party symbol namespaces while + * keeping every WebRTC / BoringSSL / internal symbol exported (which + * the Rust webrtc-sys bindings require). 
+ */ +{ + global: + *; + + local: + pw_*; + spa_*; +}; diff --git a/nix/livekit-libwebrtc/mkSystemLibraries.nix b/nix/livekit-libwebrtc/mkSystemLibraries.nix new file mode 100644 index 0000000000000000000000000000000000000000..4293798faf9031ddc80f6c2a9e70a34b6fd56d62 --- /dev/null +++ b/nix/livekit-libwebrtc/mkSystemLibraries.nix @@ -0,0 +1,64 @@ +{ + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, +}: +{ + "brotli" = { + package = brotli; + path = "third_party/brotli/BUILD.gn"; + }; + "fontconfig" = { + package = fontconfig; + path = "third_party/fontconfig/BUILD.gn"; + }; + "freetype" = { + package = freetype; + path = "build/config/freetype/freetype.gni"; + }; + "harfbuzz-ng" = { + package = harfbuzz; + path = "third_party/harfbuzz-ng/harfbuzz.gni"; + }; + "jsoncpp" = { + package = jsoncpp; + path = "third_party/jsoncpp/BUILD.gn"; + }; + "icu" = { + package = icu; + path = "third_party/icu/BUILD.gn"; + }; + "libpng" = { + package = libpng; + path = "third_party/libpng/BUILD.gn"; + }; + "libwebp" = { + package = libwebp; + path = "third_party/libwebp/BUILD.gn"; + }; + "libxml" = { + package = libxml2; + path = "third_party/libxml/BUILD.gn"; + }; + "libxslt" = { + package = libxslt; + path = "third_party/libxslt/BUILD.gn"; + }; + "zlib" = { + package = minizip; + path = "third_party/zlib/BUILD.gn"; + }; + "ffmpeg" = { + package = ffmpeg_6; + path = "third_party/ffmpeg/BUILD.gn"; + }; +} diff --git a/nix/livekit-libwebrtc/package.nix b/nix/livekit-libwebrtc/package.nix new file mode 100644 index 0000000000000000000000000000000000000000..dd7b5808ac65ab07d1293683905b694910ee503a --- /dev/null +++ b/nix/livekit-libwebrtc/package.nix @@ -0,0 +1,342 @@ +{ + stdenv, + clang, + gclient2nix, + lib, + gn, + fetchurl, + fetchpatch, + xcbuild, + python3, + ninja, + git, + cpio, + pkg-config, + glib, + alsa-lib, + pulseaudio, + nasm, + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + 
jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, + libepoxy, + libgbm, + libGL, + libxcomposite, + libxdamage, + libxext, + libxfixes, + libxrandr, + libxtst, + pipewire, + xorg, +}: +let + platformMap = { + "x86_64" = "x64"; + "i686" = "x86"; + "arm" = "arm"; + "aarch64" = "arm64"; + }; + cpuName = stdenv.hostPlatform.parsed.cpu.name; + gnArch = platformMap."${cpuName}" or (throw "unsupported arch ${cpuName}"); + gnOs = + if stdenv.hostPlatform.isLinux then + "linux" + else if stdenv.hostPlatform.isDarwin then + "mac" + else + throw "unknown platform ${stdenv.hostPlatform.config}"; + boringSslSymbols = fetchurl { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/refs/tags/webrtc-dac8015-6/webrtc-sys/libwebrtc/boringssl_prefix_symbols.txt"; + hash = "sha256-dAweArv8zjsFPENEKi9mNBQkt4y+hh3rCqG6QZjRC20="; + }; + gnSystemLibraries = import ./mkSystemLibraries.nix { + inherit + brotli + fontconfig + freetype + harfbuzz + icu + jsoncpp + libpng + libwebp + libxml2 + libxslt + minizip + ffmpeg_6 + ; + }; +in +stdenv.mkDerivation { + pname = "livekit-libwebrtc"; + version = "137-unstable-2025-11-24"; + + gclientDeps = gclient2nix.importGclientDeps ./sources.json; + sourceRoot = "src"; + + patches = [ + # Adds missing dependencies to generated LICENSE + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_licenses.patch"; + hash = "sha256-9A4KyRW1K3eoQxsTbPX0vOnj66TCs2Fxjpsu5wO8mGI="; + }) + # Fixes the certificate chain, required for Let's Encrypt certs + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/ssl_verify_callback_with_native_handle.patch"; + hash = "sha256-RBvRcJzoKItpEbqpe07YZe1D1ZVGS12EnDSISldGy+0="; + }) + # Adds dependencies and features required by livekit + (fetchpatch { + url = 
"https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_deps.patch"; + hash = "sha256-DwRtGdU5sppmiFsVuyhJoVCQrRl5JFmZJfxgUPhYXBg="; + }) + # Fix gcc-related errors + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/force_gcc.patch"; + hash = "sha256-1d73Pi1HkbunjYvp1NskUNE4xXbCmnh++rC6NrCJHbY="; + stripLen = 1; + extraPrefix = "build/"; + }) + # fix a gcc-related dav1d compile option + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/david_disable_gun_source_macro.patch"; + hash = "sha256-RCZpeeSQHaxkL3dY2oFFXDjYeU0KHw7idQFONGge8+0="; + stripLen = 1; + extraPrefix = "third_party/"; + }) + # Required for dynamically linking to ffmpeg libraries, exposing symbols, + # and hiding PipeWire symbols via version script (Linux only) to prevent + # SIGSEGV when ALSA's PipeWire plugin is loaded. + ./0001-shared-libraries.patch + # Borrow a patch from chromium to prevent a build failure due to missing libclang libraries + ./chromium-129-rust.patch + ]; + + postPatch = '' + substituteInPlace .gn \ + --replace-fail "vpython3" "python3" + + substituteInPlace tools/generate_shim_headers/generate_shim_headers.py \ + --replace-fail "OFFICIAL_BUILD" "GOOGLE_CHROME_BUILD" + + substituteInPlace BUILD.gn \ + --replace-fail "rtc_static_library" "rtc_shared_library" \ + --replace-fail "complete_static_lib = true" "" + + substituteInPlace webrtc.gni \ + --replace-fail "!build_with_chromium && is_component_build" "false" + + substituteInPlace rtc_tools/BUILD.gn \ + --replace-fail "\":frame_analyzer\"," "" + + for lib in ${toString (builtins.attrNames gnSystemLibraries)}; do + if [ -d "third_party/$lib" ]; then + find "third_party/$lib" -type f \ + \! -path "third_party/$lib/chromium/*" \ + \! 
-path "third_party/$lib/google/*" \ + \! -path "third_party/harfbuzz-ng/utils/hb_scoped.h" \ + \! -regex '.*\.\(gn\|gni\|isolate\)' \ + \! -name 'LICENSE*' \ + \! -name 'COPYING*' \ + -delete + fi + done + + # Trick the update_rust.py script into thinking we have *this specific* rust available. + # It isn't actually needed for the libwebrtc build, but GN will fail if it isn't there. + mkdir -p third_party/rust-toolchain + (python3 tools/rust/update_rust.py --print-package-version || true) \ + | head -n 1 \ + | sed 's/.* expected Rust version is \([^ ]*\) .*/rustc 1.0 1234 (\1 chromium)/' \ + > third_party/rust-toolchain/VERSION + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + mkdir -p buildtools/linux64 + ln -sf ${lib.getExe gn} buildtools/linux64/gn + cp ${./libwebrtc.version} libwebrtc.version + substituteInPlace build/toolchain/linux/BUILD.gn \ + --replace 'toolprefix = "aarch64-linux-gnu-"' 'toolprefix = ""' + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + mkdir -p buildtools/mac + ln -sf ${lib.getExe gn} buildtools/mac/gn + chmod +x build/toolchain/apple/linker_driver.py + patchShebangs build/toolchain/apple/linker_driver.py + substituteInPlace build/toolchain/apple/toolchain.gni --replace-fail "/bin/cp -Rc" "cp -a" + ''; + + outputs = [ + "dev" + "out" + ]; + + nativeBuildInputs = + (builtins.concatLists ( + lib.mapAttrsToList ( + _: library: if (library.package ? 
dev) then [ library.package.dev ] else [ ] + ) gnSystemLibraries + )) + ++ [ + gclient2nix.gclientUnpackHook + gn + (python3.withPackages (ps: [ ps.setuptools ])) + ninja + git + cpio + pkg-config + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ xcbuild ]; + + buildInputs = [ + nasm + ] + ++ (lib.mapAttrsToList (_: library: library.package) gnSystemLibraries) + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + glib + alsa-lib + pulseaudio + libepoxy + libgbm + libGL + libxcomposite + libxdamage + libxext + libxfixes + libxrandr + libxtst + pipewire + xorg.libX11 + xorg.libXi + ]); + + preConfigure = '' + echo "generate_location_tags = true" >> build/config/gclient_args.gni + echo "0" > build/util/LASTCHANGE.committime + + python build/linux/unbundle/replace_gn_files.py \ + --system-libraries ${toString (builtins.attrNames gnSystemLibraries)} + ''; + + gnFlags = [ + "is_debug=false" + "rtc_include_tests=false" + ''target_os="${gnOs}"'' + ''target_cpu="${gnArch}"'' + "treat_warnings_as_errors=false" + "rtc_enable_protobuf=false" + "rtc_include_tests=false" + "rtc_build_examples=false" + "rtc_build_tools=false" + "rtc_libvpx_build_vp9=true" + "enable_libaom=true" + "use_dummy_lastchange=true" + "is_component_build=true" + "enable_stripping=true" + "rtc_use_h264=true" + "rtc_use_h265=true" + "use_custom_libcxx=false" + "use_rtti=true" + ] + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + "rtc_use_pipewire=true" + "symbol_level=0" + "enable_iterator_debugging=false" + "rtc_use_x11=true" + "use_sysroot=false" + "use_custom_libcxx_for_host=false" + "use_libcxx_modules=false" + "use_llvm_libatomic=false" + "is_clang=false" + ]) + ++ (lib.optionals stdenv.hostPlatform.isDarwin [ + ''mac_deployment_target="${stdenv.hostPlatform.darwinMinVersion}"'' + "rtc_enable_symbol_export=true" + "rtc_enable_objc_symbol_export=true" + "rtc_include_dav1d_in_internal_decoder_factory=true" + "clang_use_chrome_plugins=false" + "use_lld=false" + ''clang_base_path="${clang}"'' + ]); + + 
ninjaFlags = [ + ":default" + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ + "api/audio_codecs:builtin_audio_decoder_factory" + "api/task_queue:default_task_queue_factory" + "sdk:native_api" + "sdk:default_codec_factory_objc" + "pc:peer_connection" + "sdk:videocapture_objc" + "sdk:mac_framework_objc" + "desktop_capture_objc" + ]; + + postBuild = + lib.optionalString stdenv.hostPlatform.isLinux '' + objcopy --redefine-syms="${boringSslSymbols}" "libwebrtc.so" + '' + + '' + # Generate licenses + python3 "../../tools_webrtc/libs/generate_licenses.py" \ + --target ${if stdenv.hostPlatform.isDarwin then ":webrtc" else ":default"} $PWD $PWD + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/lib + mkdir -p $dev/include + + install -m0644 obj/webrtc.ninja obj/modules/desktop_capture/desktop_capture.ninja args.gn LICENSE.md $dev + + pushd ../.. + find . -name "*.h" -print | cpio -pd $dev/include + find . -name "*.inc" -print | cpio -pd $dev/include + popd + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + install -m0644 libwebrtc.so libthird_party_boringssl.so $out/lib + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + install -m0644 WebRTC.framework/Versions/A/WebRTC $out/lib/libwebrtc.dylib + install -m0644 libthird_party_boringssl.dylib $out/lib + '' + + '' + ln -s $out/lib $dev/lib + + runHook postInstall + ''; + + postFixup = lib.optionalString stdenv.hostPlatform.isDarwin '' + boringssl="$out/lib/libthird_party_boringssl.dylib" + webrtc="$out/lib/libwebrtc.dylib" + + install_name_tool -id "$boringssl" "$boringssl" + install_name_tool -id "$webrtc" "$webrtc" + install_name_tool -change @rpath/libthird_party_boringssl.dylib "$boringssl" "$webrtc" + ''; + + passthru.updateScript = ./update.sh; + + meta = { + description = "WebRTC library used by livekit"; + homepage = "https://github.com/livekit/rust-sdks/"; + license = lib.licenses.bsd3; + maintainers = with lib.maintainers; [ + WeetHet + niklaskorz + ]; + platforms = 
lib.platforms.linux ++ lib.platforms.darwin; + }; +} diff --git a/nix/livekit-libwebrtc/sources.json b/nix/livekit-libwebrtc/sources.json new file mode 100644 index 0000000000000000000000000000000000000000..2db785a840f1db0e86a255c5d8c540f5c566ac59 --- /dev/null +++ b/nix/livekit-libwebrtc/sources.json @@ -0,0 +1,372 @@ +{ + "src": { + "args": { + "hash": "sha256-+PgmOZD2Fi+SC66nguixhSwDsoXi4Sz693qOZZrLXm8=", + "owner": "webrtc-sdk", + "repo": "webrtc", + "rev": "624fa1dce239af785fc5fa9ca3b21b9250d3f835" + }, + "fetcher": "fetchFromGitHub" + }, + "src/base": { + "args": { + "hash": "sha256-MTG+pjMPY6/dqeEUy+xJVxPuICETtV98S+h/lFwGItg=", + "rev": "86c814633cf284bc8057a539bc722e2a672afe2f", + "url": "https://chromium.googlesource.com/chromium/src/base" + }, + "fetcher": "fetchFromGitiles" + }, + "src/build": { + "args": { + "hash": "sha256-qFZ12YFX4qxFEHU+VWOG+HDYYPXodgGz+iJ7WEc7cD8=", + "owner": "webrtc-sdk", + "repo": "build", + "rev": "01021e6c12636951a6b4e5342e16b2101b352367" + }, + "fetcher": "fetchFromGitHub" + }, + "src/buildtools": { + "args": { + "hash": "sha256-YWtmMKL1ydueNJ4XM/Pq+8OpqIFe5A6/vYyfZTv7/EI=", + "rev": "0f32cb9025766951122d4ed19aba87a94ded3f43", + "url": "https://chromium.googlesource.com/chromium/src/buildtools" + }, + "fetcher": "fetchFromGitiles" + }, + "src/testing": { + "args": { + "hash": "sha256-s65cABkyMo+FkAmilS67qM3VnrT7iYZg9scycrXzxyE=", + "rev": "a89c37d36bf80c05963727e28b9916835ae88d3a", + "url": "https://chromium.googlesource.com/chromium/src/testing" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party": { + "args": { + "hash": "sha256-q+xVOFlpC0vnLMSF9Z6ZRL7mb/cu8jBpsWjDNFFgiKM=", + "rev": "8062e0e102496ff14a8c58b586f014527424953d", + "url": "https://chromium.googlesource.com/chromium/src/third_party" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/boringssl/src": { + "args": { + "hash": "sha256-5Efqc8pLs4ZskXQGpFdTb5cw//v3+DR285m/DsrWSWA=", + "rev": "34492c89a8e381e0e856a686cc71b1eb5bd728db", + "url": 
"https://boringssl.googlesource.com/boringssl.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/breakpad/breakpad": { + "args": { + "hash": "sha256-0ynZuxIqBIpNkfD3Y9XdPFQr7HeQcsUO3lhnqvH+k8c=", + "rev": "232a723f5096ab02d53d87931efa485fa77d3b03", + "url": "https://chromium.googlesource.com/breakpad/breakpad.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/catapult": { + "args": { + "hash": "sha256-FIJZE1Qu1MLZA4qxB68k1NjhgSbFTjf57YF85JicVZw=", + "rev": "000f47cfa393d7f9557025a252862e2a61a60d44", + "url": "https://chromium.googlesource.com/catapult.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ced/src": { + "args": { + "hash": "sha256-ySG74Rj2i2c/PltEgHVEDq+N8yd9gZmxNktc56zIUiY=", + "rev": "ba412eaaacd3186085babcd901679a48863c7dd5", + "url": "https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/clang-format/script": { + "args": { + "hash": "sha256-d9uweklBffiuCWEb03ti1eFLnMac2qRtvggzXY1n/RU=", + "rev": "37f6e68a107df43b7d7e044fd36a13cbae3413f2", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/colorama/src": { + "args": { + "hash": "sha256-6ZTdPYSHdQOLYMSnE+Tp7PgsVTs3U2awGu9Qb4Rg/tk=", + "rev": "3de9f013df4b470069d03d250224062e8cf15c49", + "url": "https://chromium.googlesource.com/external/colorama.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/compiler-rt/src": { + "args": { + "hash": "sha256-yo7BFGgwJNScsXwnCAu8gFBdZVS8/HJplzUk2e73mVg=", + "rev": "57213f125d03209892fed26189feb3b736e96735", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/crc32c/src": { + "args": { + "hash": "sha256-KBraGaO5LmmPP+p8RuDogGldbTWdNDK+WzF4Q09keuE=", + "rev": 
"d3d60ac6e0f16780bcfcc825385e1d338801a558", + "url": "https://chromium.googlesource.com/external/github.com/google/crc32c.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/dav1d/libdav1d": { + "args": { + "hash": "sha256-+DY4p41VuAlx7NvOfXjWzgEhvtpebjkjbFwSYOzSjv4=", + "rev": "8d956180934f16244bdb58b39175824775125e55", + "url": "https://chromium.googlesource.com/external/github.com/videolan/dav1d.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/depot_tools": { + "args": { + "hash": "sha256-DWQyYtpAAGiryeGJzIWlUwY5yn4cNwXY957vlPDUNak=", + "rev": "fa8fc854e1766b86f10c9a15902cf3cc23adaac2", + "url": "https://chromium.googlesource.com/chromium/tools/depot_tools.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ffmpeg": { + "args": { + "hash": "sha256-hNzQZQxaa2Wtl7GWWF852cFmmXy4pc15Pp0d59TTfnI=", + "rev": "01f23648c6b84de6c0f717fa4e1816f53b9ee72e", + "url": "https://chromium.googlesource.com/chromium/third_party/ffmpeg.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/flatbuffers/src": { + "args": { + "hash": "sha256-tbc45o0MbMvK5XqRUJt5Eg8BU6+TJqlmwFgQhHq6wRM=", + "rev": "8db59321d9f02cdffa30126654059c7d02f70c32", + "url": "https://chromium.googlesource.com/external/github.com/google/flatbuffers.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fontconfig/src": { + "args": { + "hash": "sha256-W5WIgC6A52kY4fNkbsDEa0o+dfd97Rl5NKfgnIRpI00=", + "rev": "14d466b30a8ab4a9d789977ed94f2c30e7209267", + "url": "https://chromium.googlesource.com/external/fontconfig.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/freetype/src": { + "args": { + "hash": "sha256-Vlin6Z+QisUyj6R+TclVOm8x6673YhUIWob9Ih6gzC8=", + "rev": "1da283b8ae6d6b94f34a5c4b8c1227adc9dbb1d8", + "url": "https://chromium.googlesource.com/chromium/src/third_party/freetype2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fuzztest/src": { + "args": { + "hash": 
"sha256-L2QG0pUmGjGdtdlivxYfxSqO9YaVHpIT6lvJwBMTxMw=", + "rev": "b10387fdbbca18192f85eaa5323a59f44bf9c468", + "url": "https://chromium.googlesource.com/external/github.com/google/fuzztest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/google_benchmark/src": { + "args": { + "hash": "sha256-cH8s1gP6kCcojAAfTt5iQCVqiAaSooNk4BdaILujM3w=", + "rev": "761305ec3b33abf30e08d50eb829e19a802581cc", + "url": "https://chromium.googlesource.com/external/github.com/google/benchmark.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/googletest/src": { + "args": { + "hash": "sha256-QT9PQ9bF+eCPfRLkcHpH4jc0UZfGPc98fHf8QDV5bZg=", + "rev": "cd430b47a54841ec45d64d2377d7cabaf0eba610", + "url": "https://chromium.googlesource.com/external/github.com/google/googletest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/grpc/src": { + "args": { + "hash": "sha256-xivmP36VCSbiMAV3PDUjzCrF+AJzFXJdMe5e2q9yW/k=", + "rev": "957c9f95224b1e1318c0ecb98d0e7584ea5ccff2", + "url": "https://chromium.googlesource.com/external/github.com/grpc/grpc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/gtest-parallel": { + "args": { + "hash": "sha256-VUuk5tBTh+aU2dxVWUF1FePWlKUJaWSiGSXk/J5zgHw=", + "rev": "96f4f904922f9bf66689e749c40f314845baaac8", + "url": "https://chromium.googlesource.com/external/github.com/google/gtest-parallel" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/harfbuzz-ng/src": { + "args": { + "hash": "sha256-lNnCtgIegUy4DLhYaGZXcEaFw83KWAHoKpz69AEsWp4=", + "rev": "9f83bbbe64654b45ba5bb06927ff36c2e7588495", + "url": "https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/icu": { + "args": { + "hash": "sha256-eGI/6wk6IOUPvX7pRTm4VJk1CqkkxalTu84L36i/D6k=", + "rev": "4c8cc4b365a505ce35be1e0bd488476c5f79805d", + "url": "https://chromium.googlesource.com/chromium/deps/icu.git" + }, + "fetcher": "fetchFromGitiles" + }, + 
"src/third_party/instrumented_libs": { + "args": { + "hash": "sha256-8kokdsnn5jD9KgM/6g0NuITBbKkGXWEM4BMr1nCrfdU=", + "rev": "69015643b3f68dbd438c010439c59adc52cac808", + "url": "https://chromium.googlesource.com/chromium/third_party/instrumented_libraries.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/jsoncpp/source": { + "args": { + "hash": "sha256-bSLNcoYBz3QCt5VuTR056V9mU2PmBuYBa0W6hFg2m8Q=", + "rev": "42e892d96e47b1f6e29844cc705e148ec4856448", + "url": "https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libFuzzer/src": { + "args": { + "hash": "sha256-Lb+HczYax0T7qvC0/Nwhc5l2szQTUYDouWRMD/Qz7sA=", + "rev": "e31b99917861f891308269c36a32363b120126bb", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libaom/source/libaom": { + "args": { + "hash": "sha256-ngVZ+xK0b+jKUmawteQ7VFAQzoebX4jqZ3hP9pW+Q0Q=", + "rev": "a23a4799ec2d7dd6e436c7b64a34553773014ed7", + "url": "https://aomedia.googlesource.com/aom.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++/src": { + "args": { + "hash": "sha256-lqeuVUgeAKm1pxo+w1vyUbBkBXBzLCQ+Lfu44neKLPo=", + "rev": "917609c669e43edc850eeb192a342434a54e1dfd", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++abi/src": { + "args": { + "hash": "sha256-X9cAbyd8ZPSwqOGhPYwIZ6b9E3tVwAuAYZKMgbZQxgk=", + "rev": "f2a7f2987f9dcdf8b04c2d8cd4dcb186641a7c3e", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libjpeg_turbo": { + "args": { + "hash": "sha256-Ig+tmprZDvlf/M72/DTar2pbxat9ZElgSqdXdoM0lPs=", + "rev": "e14cbfaa85529d47f9f55b0f104a579c1061f9ad", + "url": 
"https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libsrtp": { + "args": { + "hash": "sha256-bkG1+ss+1a2rCHGwZjhvf5UaNVbPPZJt9HZSIPBKGwM=", + "rev": "a52756acb1c5e133089c798736dd171567df11f5", + "url": "https://chromium.googlesource.com/chromium/deps/libsrtp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libunwind/src": { + "args": { + "hash": "sha256-XdFKn+cGOxA0fHkVMG9UAhCmpML44ocoyHB7XnumX7o=", + "rev": "81e2cb40a70de2b6978e6d8658891ded9a77f7e3", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libvpx/source/libvpx": { + "args": { + "hash": "sha256-NIGpzP6elcPScHJlZmnPHJdmXsuHcbuELT0C4Ha5PcA=", + "rev": "ff1d193f4b9dfa9b2ced51efbb6ec7a69e58e88c", + "url": "https://chromium.googlesource.com/webm/libvpx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libyuv": { + "args": { + "hash": "sha256-b/EYCWBQvsNoGhea31DPBKpG8eouf0OBi5TgdHDHs9A=", + "rev": "1e40e34573c3861480d107cd4a4ce290df79951f", + "url": "https://chromium.googlesource.com/libyuv/libyuv.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/llvm-libc/src": { + "args": { + "hash": "sha256-yNNx3gOGafMNvZ+aebDKHVj6QM8g0zt0d69PWlWLkyk=", + "rev": "912274164f0877ca917c06e8484ad3be1784833a", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/lss": { + "args": { + "hash": "sha256-rhp4EcZYdgSfu9cqn+zxxGx6v2IW8uX8V+iA0UfZhFY=", + "rev": "ed31caa60f20a4f6569883b2d752ef7522de51e0", + "url": "https://chromium.googlesource.com/linux-syscall-support.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/nasm": { + "args": { + "hash": "sha256-neYrS4kQ76ihUh22Q3uPR67Ld8+yerA922YSZU1KxJs=", + "rev": "9f916e90e6fc34ec302573f6ce147e43e33d68ca", + "url": 
"https://chromium.googlesource.com/chromium/deps/nasm.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/openh264/src": { + "args": { + "hash": "sha256-tf0lnxATCkoq+xRti6gK6J47HwioAYWnpEsLGSA5Xdg=", + "rev": "652bdb7719f30b52b08e506645a7322ff1b2cc6f", + "url": "https://chromium.googlesource.com/external/github.com/cisco/openh264" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/perfetto": { + "args": { + "hash": "sha256-I0qiAh3VliVop+3S2/tP6VwCAJOk0Vu7xy8vHJZ1w2A=", + "rev": "a54dd38d60593129ae56d400f1a72860670abea4", + "url": "https://chromium.googlesource.com/external/github.com/google/perfetto.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/protobuf-javascript/src": { + "args": { + "hash": "sha256-zq86SrDASl6aYPFPijRZp03hJqXUFz2Al/KkiNq7i0M=", + "rev": "eb785a9363664a402b6336dfe96aad27fb33ffa8", + "url": "https://chromium.googlesource.com/external/github.com/protocolbuffers/protobuf-javascript" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/re2/src": { + "args": { + "hash": "sha256-f/k2rloV2Nwb0KuJGUX4SijFxAx69EXcsXOG4vo+Kis=", + "rev": "c84a140c93352cdabbfb547c531be34515b12228", + "url": "https://chromium.googlesource.com/external/github.com/google/re2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/tools": { + "args": { + "hash": "sha256-kZFZl8SC9nZIIOVtNl/5H4huw6BCBsBkJVJ4gaUmly4=", + "rev": "ffcbc837bbb14d80d09147c2af5302ff6bd4bd69", + "url": "https://chromium.googlesource.com/chromium/src/tools" + }, + "fetcher": "fetchFromGitiles" + } +} diff --git a/nix/livekit-libwebrtc/update.sh b/nix/livekit-libwebrtc/update.sh new file mode 100644 index 0000000000000000000000000000000000000000..b28c405b300280b25ab7aa3b85936d0f3ae75878 --- /dev/null +++ b/nix/livekit-libwebrtc/update.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i bash -p gitMinimal curl gojq gclient2nix + +set -eou pipefail +package="livekit-libwebrtc" +pkg_dir="$(dirname "$0")" +nixpkgs="$(git rev-parse --show-toplevel)" + 
+gh-curl () { + curl --silent ${GITHUB_TOKEN:+-u ":$GITHUB_TOKEN"} "$1" +} + +# Get the current version part before the "-unstable-" for the branch name. +# To manually update to a new major version, you can also invoke the script +# with the new major version, e.g., UPDATE_MAJOR_VERSION=137. +old_version="${UPDATE_NIX_OLD_VERSION:-$(nix-instantiate --eval -E "(import \"$nixpkgs\" { }).$package.version" | tr -d '"')}" +major_version="${UPDATE_MAJOR_VERSION:-${old_version%%-unstable-*}}" +branch="m${major_version}_release" + +# Fetch the current HEAD commit of the release branch +head="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/refs/heads/$branch" | gojq '.object.sha' --raw-output)" +if gojq -e ".src.args.rev == \"$head\"" "$pkg_dir/sources.json"; then + echo "$package is already up-to-date: $head" + exit 0 +fi + +# Get the commit's date for the version field +date="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/commits/$head" | gojq '.committer.date| split("T") | .[0]' --raw-output)" + +echo "Updating sources.json to $head" +gclient2nix generate --root src "https://github.com/webrtc-sdk/webrtc@$head" > "$pkg_dir/sources.json" + +sed -i "s|$old_version|$major_version-unstable-$date|g" "$pkg_dir/package.nix" diff --git a/rust-toolchain.toml b/rust-toolchain.toml index d12da67e318e37e9aff2d7eda14e1782ad6360c5..89b3c648ca2a8a9b893d1b0924697f8170047761 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -4,5 +4,6 @@ profile = "minimal" components = [ "rustfmt", "clippy", "rust-analyzer", "rust-src" ] targets = [ "wasm32-wasip2", # extensions + "wasm32-unknown-unknown", # gpui on the web "x86_64-unknown-linux-musl", # remote server ] diff --git a/script/bundle-linux b/script/bundle-linux index 4e58ac315bd231fd4ae9208abbc15007abc30631..c89d21082dd6c33a11ffcfc908ef87a91554dc18 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -71,7 +71,7 @@ if "$rustup_installed"; then rustup target add "$remote_server_triple" fi 
-export CC=$(which clang) +export CC=${CC:-$(which clang)} # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" diff --git a/script/clippy b/script/clippy index 5c13b0b39cea3937a43ca54de074e5f65fae7c3b..617d99a5623e6406d1dc01247ea2f5b8e5c3b762 100755 --- a/script/clippy +++ b/script/clippy @@ -16,4 +16,8 @@ if [[ -z "${GITHUB_ACTIONS+x}" ]]; then which typos >/dev/null 2>&1 || exit 0 typos --config typos.toml + + which buf >/dev/null 2>&1 || exit 0 + buf lint crates/proto/proto + buf format --diff --exit-code crates/proto/proto fi diff --git a/script/docs-strip-preview-callouts b/script/docs-strip-preview-callouts index dc4d1eb1764a88a89b7406b8b02577bb7471ceb7..ca88fa22b5e12f2a0af09dd005334e662b128c54 100755 --- a/script/docs-strip-preview-callouts +++ b/script/docs-strip-preview-callouts @@ -117,8 +117,8 @@ if [[ "$DRY_RUN" == "true" ]]; then exit 0 fi -# Check for clean working state -if [[ -n "$(git status --porcelain docs/)" ]]; then +# Check for clean working state (ignore untracked files) +if [[ -n "$(git status --porcelain docs/ | grep -v '^??' || true)" ]]; then error "docs/ directory has uncommitted changes. Please commit or stash first." fi @@ -213,8 +213,11 @@ And: > **Changed in Preview (v0.XXX).** See [release notes](/releases#0.XXX). \`\`\` -These features are now in Stable, so the callouts are no longer needed." \ - --label "documentation" +These features are now in Stable, so the callouts are no longer needed. + +Release Notes: + +- N/A" PR_URL=$(gh pr view --json url --jq '.url') diff --git a/script/docs-suggest-publish b/script/docs-suggest-publish index 24ea1c31addf59ad4abc8ef10e4649fdead4e7bb..23578785159b5fd720e84d3658f7f76dddf3ada9 100755 --- a/script/docs-suggest-publish +++ b/script/docs-suggest-publish @@ -7,16 +7,19 @@ # # This script: # 1. Reads pending suggestions from the docs/suggestions-pending branch -# 2. 
Uses Droid to apply all suggestions directly to docs files +# 2. Uses Droid to apply suggestions in batches (default 10 per batch) # 3. Runs docs formatting -# 4. Creates a draft PR for human review/merge -# 5. Optionally resets the suggestions branch after successful PR creation +# 4. Validates docs build (action references, JSON schemas, links) +# 5. Creates a draft PR for human review/merge +# 6. Optionally resets the suggestions branch after successful PR creation # # Options: -# --dry-run Show what would be done without creating PR -# --keep-queue Don't reset the suggestions branch after PR creation -# --model MODEL Override Droid model used for auto-apply -# --verbose Show detailed progress +# --dry-run Show what would be done without creating PR +# --keep-queue Don't reset the suggestions branch after PR creation +# --model MODEL Override Droid model used for auto-apply +# --batch-size N Suggestions per Droid invocation (default: 10) +# --skip-validation Skip the docs build validation step +# --verbose Show detailed progress # # Run this as part of the preview release workflow. @@ -25,7 +28,9 @@ set -euo pipefail DRY_RUN=false KEEP_QUEUE=false VERBOSE=false -MODEL="${DROID_MODEL:-claude-sonnet-4-5-20250929}" +SKIP_VALIDATION=false +BATCH_SIZE=10 +MODEL="${DROID_MODEL:-claude-sonnet-4-5-latest}" SUGGESTIONS_BRANCH="docs/suggestions-pending" @@ -66,8 +71,16 @@ while [[ $# -gt 0 ]]; do MODEL="$2" shift 2 ;; + --batch-size) + BATCH_SIZE="$2" + shift 2 + ;; + --skip-validation) + SKIP_VALIDATION=true + shift + ;; -h|--help) - head -26 "$0" | tail -24 + head -30 "$0" | tail -28 exit 0 ;; *) @@ -132,12 +145,16 @@ if [[ "$DRY_RUN" == "true" ]]; then exit 0 fi -# Ensure clean working state -if [[ -n "$(git status --porcelain)" ]]; then +# Ensure clean working state (ignore untracked files with grep -v '??') +if [[ -n "$(git status --porcelain | grep -v '^??' || true)" ]]; then error "Working directory has uncommitted changes. Please commit or stash first." 
fi -for command in git gh jq droid; do +REQUIRED_COMMANDS=(git gh jq droid) +if [[ "$SKIP_VALIDATION" != "true" ]]; then + REQUIRED_COMMANDS+=(mdbook) +fi +for command in "${REQUIRED_COMMANDS[@]}"; do if ! command -v "$command" > /dev/null 2>&1; then error "Required command not found: $command" fi @@ -157,24 +174,165 @@ git checkout -b "$DOCS_BRANCH" origin/main TMPDIR=$(mktemp -d) trap 'rm -rf "$TMPDIR"' EXIT -SUGGESTIONS_FILE="$TMPDIR/suggestions.md" -APPLY_PROMPT_FILE="$TMPDIR/apply-prompt.md" APPLY_SUMMARY_FILE="$TMPDIR/apply-summary.md" - -# Combine queued suggestion files into one input -for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do - { - echo "## Source: $file" - echo "" - git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || error "Suggestion file missing: $file" - echo "" - echo "---" +touch "$APPLY_SUMMARY_FILE" + +# Collect suggestion files into an array +SUGGESTION_FILES=() +while IFS= read -r file; do + SUGGESTION_FILES+=("$file") +done < <(echo "$MANIFEST" | jq -r '.suggestions[].file') + +# Determine which PRs are already in the latest stable release. +# Suggestions queued with --preview may reference features that shipped in stable +# by the time this script runs, so their Preview callouts should be stripped. 
+STABLE_PRS=() +STABLE_TAG=$(git tag -l 'v*' --sort=-v:refname | grep -v 'pre' | head -1 || true) +if [[ -n "$STABLE_TAG" ]]; then + log "Latest stable release tag: $STABLE_TAG" + for file in "${SUGGESTION_FILES[@]}"; do + pr_num=$(echo "$MANIFEST" | jq -r --arg f "$file" '.suggestions[] | select(.file == $f) | .pr') + # Find the merge commit for this PR + merge_sha=$(gh pr view "$pr_num" --json mergeCommit --jq '.mergeCommit.oid' 2>/dev/null || true) + if [[ -n "$merge_sha" ]] && git merge-base --is-ancestor "$merge_sha" "$STABLE_TAG" 2>/dev/null; then + STABLE_PRS+=("$pr_num") + log "PR #$pr_num is in stable ($STABLE_TAG)" + fi + done + if [[ ${#STABLE_PRS[@]} -gt 0 ]]; then + echo -e "${YELLOW}Note:${NC} ${#STABLE_PRS[@]} suggestion(s) are for PRs already in stable ($STABLE_TAG)." + echo " Preview callouts will be stripped for: ${STABLE_PRS[*]}" echo "" - } >> "$SUGGESTIONS_FILE" -done + fi +else + log "No stable release tag found, treating all suggestions as preview-only" +fi -# Build auto-apply prompt -cat > "$APPLY_PROMPT_FILE" << 'EOF' +# Determine which PRs touch code gated behind feature flags. +# Features behind flags aren't generally available and shouldn't be documented yet. +FLAGGED_PRS=() +FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs" +if [[ -f "$FLAGS_FILE" ]]; then + # Extract feature flag struct names (e.g. 
SubagentsFeatureFlag, GitGraphFeatureFlag) + FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}') + if [[ -n "$FLAG_NAMES" ]]; then + FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//') + log "Feature flags found: $(echo "$FLAG_NAMES" | tr '\n' ' ')" + for file in "${SUGGESTION_FILES[@]}"; do + pr_num=$(echo "$MANIFEST" | jq -r --arg f "$file" '.suggestions[] | select(.file == $f) | .pr') + # Skip PRs already marked as stable (no need to double-check) + is_already_stable=false + for stable_pr in "${STABLE_PRS[@]+"${STABLE_PRS[@]}"}"; do + if [[ "$stable_pr" == "$pr_num" ]]; then + is_already_stable=true + break + fi + done + if [[ "$is_already_stable" == "true" ]]; then + continue + fi + # Check if the PR diff references any feature flag + pr_diff=$(gh pr diff "$pr_num" 2>/dev/null || true) + if [[ -n "$pr_diff" ]] && echo "$pr_diff" | grep -qE "$FLAG_PATTERN"; then + matched_flags=$(echo "$pr_diff" | grep -oE "$FLAG_PATTERN" | sort -u | tr '\n' ', ' | sed 's/,$//') + FLAGGED_PRS+=("$pr_num") + log "PR #$pr_num is behind feature flag(s): $matched_flags" + fi + done + if [[ ${#FLAGGED_PRS[@]} -gt 0 ]]; then + echo -e "${YELLOW}Note:${NC} ${#FLAGGED_PRS[@]} suggestion(s) are for features behind feature flags." + echo " These will be skipped: ${FLAGGED_PRS[*]}" + echo "" + fi + fi +else + log "Feature flags file not found, skipping flag detection" +fi + +# Split into batches +TOTAL=${#SUGGESTION_FILES[@]} +BATCH_COUNT=$(( (TOTAL + BATCH_SIZE - 1) / BATCH_SIZE )) + +if [[ "$BATCH_COUNT" -gt 1 ]]; then + echo "Processing $TOTAL suggestions in $BATCH_COUNT batches of up to $BATCH_SIZE..." +else + echo "Processing $TOTAL suggestions..." 
+fi +echo "" + +for (( batch=0; batch> "$APPLY_SUMMARY_FILE" + continue + fi + + BATCH_HAS_SUGGESTIONS=true + + # Check if PR is already in stable + is_stable=false + for stable_pr in "${STABLE_PRS[@]+"${STABLE_PRS[@]}"}"; do + if [[ "$stable_pr" == "$pr_num" ]]; then + is_stable=true + break + fi + done + { + echo "## Source: $file" + if [[ "$is_stable" == "true" ]]; then + echo "" + echo "> **ALREADY IN STABLE**: PR #$pr_num shipped in $STABLE_TAG." + echo "> Do NOT add Preview or Changed-in-Preview callouts for this suggestion." + echo "> Apply the documentation content only, without any preview-related callouts." + fi + echo "" + git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || error "Suggestion file missing: $file" + echo "" + echo "---" + echo "" + } >> "$BATCH_SUGGESTIONS_FILE" + done + + # Skip this batch if all its suggestions were flagged + if [[ "$BATCH_HAS_SUGGESTIONS" == "false" ]]; then + echo -e " ${YELLOW}Batch $BATCH_NUM skipped (all suggestions behind feature flags)${NC}" + continue + fi + + # Build auto-apply prompt for this batch + cat > "$BATCH_PROMPT_FILE" << 'EOF' # Documentation Auto-Apply Request (Preview Release) Apply all queued documentation suggestions below directly to docs files in this repository. @@ -201,7 +359,15 @@ Before making edits, read and follow these rule files: 6. Keep preview callout semantics correct: - Additive features: `> **Preview:** ...` - Behavior modifications: `> **Changed in Preview (vX.XXX).** ...` + - **Exception**: Suggestions marked "ALREADY IN STABLE" must NOT get any preview callouts. + These features already shipped in a stable release. Apply the content changes only. + - Suggestions for features behind feature flags have been pre-filtered and excluded. + If you encounter references to feature-flagged functionality, do not document it. 7. If a suggestion is too ambiguous to apply safely, skip it and explain why in the summary. +8. 
**Do not invent `{#kb}` or `{#action}` references.** Only use action names that already + appear in the existing docs files you are editing. If unsure whether an action name is + valid, use plain text instead. The docs build validates all action references against + the compiled binary and will reject unknown names. ## Output format (after making edits) @@ -218,12 +384,30 @@ Do not include a patch in the response; apply edits directly to files. EOF -cat "$SUGGESTIONS_FILE" >> "$APPLY_PROMPT_FILE" + cat "$BATCH_SUGGESTIONS_FILE" >> "$BATCH_PROMPT_FILE" + + log "Running Droid auto-apply (batch $BATCH_NUM) with model: $MODEL" + if ! droid exec -m "$MODEL" -f "$BATCH_PROMPT_FILE" --auto high > "$BATCH_SUMMARY_FILE" 2>&1; then + echo "Droid exec output (batch $BATCH_NUM):" + cat "$BATCH_SUMMARY_FILE" + error "Droid exec failed on batch $BATCH_NUM. See output above." + fi + + # Append batch summary + { + echo "### Batch $BATCH_NUM" + echo "" + cat "$BATCH_SUMMARY_FILE" + echo "" + } >> "$APPLY_SUMMARY_FILE" + + echo -e " ${GREEN}Batch $BATCH_NUM complete${NC}" +done +echo "" -log "Running Droid auto-apply with model: $MODEL" -droid exec -m "$MODEL" -f "$APPLY_PROMPT_FILE" > "$APPLY_SUMMARY_FILE" +log "All batches completed, checking results..." -if [[ -n "$(git status --porcelain | grep -vE '^.. docs/' || true)" ]]; then +if [[ -n "$(git status --porcelain | grep -v '^??' | grep -vE '^.. docs/' || true)" ]]; then error "Auto-apply modified non-doc files. Revert and re-run." fi @@ -232,12 +416,33 @@ if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; th fi log "Running docs formatter" -./script/prettier +./script/prettier --write if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; then error "No docs/src changes remain after formatting; aborting PR creation." fi +# Validate docs build before creating PR +if [[ "$SKIP_VALIDATION" != "true" ]]; then + echo "Validating docs build..." + log "Generating action metadata..." 
+ if ! ./script/generate-action-metadata > /dev/null 2>&1; then + echo -e "${YELLOW}Warning:${NC} Could not generate action metadata (cargo build may have failed)." + echo "Skipping docs build validation. CI will still catch errors." + else + VALIDATION_DIR="$TMPDIR/docs-validation" + if ! mdbook build ./docs --dest-dir="$VALIDATION_DIR" 2>"$TMPDIR/validation-errors.txt"; then + echo "" + echo -e "${RED}Docs build validation failed:${NC}" + cat "$TMPDIR/validation-errors.txt" + echo "" + error "Fix the errors above and re-run, or use --skip-validation to bypass." + fi + echo -e "${GREEN}Docs build validation passed.${NC}" + fi + echo "" +fi + # Build PR body from suggestions PR_BODY_FILE="$TMPDIR/pr-body.md" cat > "$PR_BODY_FILE" << 'EOF' @@ -309,7 +514,7 @@ Release Notes: EOF git add docs/ -git commit -m "docs: auto-apply preview release suggestions +git commit -m "docs: Auto-apply preview release suggestions Auto-applied queued documentation suggestions from: $(echo "$MANIFEST" | jq -r '.suggestions[] | "- PR #\(.pr)"') @@ -323,9 +528,8 @@ git push -u origin "$DOCS_BRANCH" log "Creating PR..." PR_URL=$(gh pr create \ --draft \ - --title "docs: auto-apply preview release suggestions" \ - --body-file "$PR_BODY_FILE" \ - --label "documentation") + --title "docs: Apply preview release suggestions" \ + --body-file "$PR_BODY_FILE") echo "" echo -e "${GREEN}PR created:${NC} $PR_URL" @@ -366,6 +570,7 @@ EOF Previous suggestions published in: $PR_URL" + # Force push required: replacing the orphan suggestions branch with a clean slate git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH" git checkout "$ORIGINAL_BRANCH" git branch -D "${SUGGESTIONS_BRANCH}-reset" @@ -377,9 +582,6 @@ else echo "Suggestions queue kept (--keep-queue). Remember to reset manually after PR is merged." 
fi -# Cleanup - - echo "" echo -e "${GREEN}Done!${NC}" echo "" diff --git a/script/github-check-new-issue-for-duplicates.py b/script/github-check-new-issue-for-duplicates.py index c8978d421b0c68779dfcd98f628ff1af6f57c91e..245d9aaa9a85bcab686b15bef6dbec7b7aaf6545 100644 --- a/script/github-check-new-issue-for-duplicates.py +++ b/script/github-check-new-issue-for-duplicates.py @@ -89,7 +89,12 @@ def post_comment(issue_number: int, body): def build_duplicate_comment(matches): """Build the comment body for potential duplicates.""" match_list = "\n".join(f"- #{m['number']}" for m in matches) - explanations = "\n\n".join(f"**#{m['number']}:** {m['explanation']}" for m in matches) + explanations = "\n\n".join( + f"**#{m['number']}:** {m['explanation']}\n\n**Shared root cause:** {m['shared_root_cause']}" + if m.get('shared_root_cause') + else f"**#{m['number']}:** {m['explanation']}" + for m in matches + ) return f"""This issue appears to be a duplicate of: @@ -307,7 +312,7 @@ def enrich_magnets(magnets): for magnet in magnets: data = github_api_get(f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{magnet['number']}") magnet["title"] = data["title"] - magnet["body_preview"] = (data.get("body") or "")[:500] + magnet["body_preview"] = (data.get("body") or "")[:1000] def areas_match(detected, magnet_area): @@ -381,7 +386,7 @@ def search_for_similar_issues(issue, detected_areas, max_searches=6): "title": item["title"], "state": item.get("state", ""), "created_at": item.get("created_at", ""), - "body_preview": body[:500], + "body_preview": body[:1000], "source": search_type, } except requests.RequestException as e: @@ -414,12 +419,30 @@ def analyze_duplicates(anthropic_key, issue, magnets, search_results): system_prompt = """You analyze GitHub issues to identify potential duplicates. -Given a new issue and a list of existing issues, identify which existing issues might be duplicates. 
+Given a new issue and a list of existing issues, identify which existing issues are duplicates — meaning +they are caused by the SAME BUG in the code, not just similar symptoms. + +CRITICAL DISTINCTION — shared symptoms vs shared root cause: +- "models missing", "can't sign in", "editor hangs", "venv not detected" are SYMPTOMS that many + different bugs can produce. Two reports of the same symptom are NOT duplicates unless you can + identify a specific shared root cause. +- A duplicate means: if a developer fixed the existing issue, the new issue would also be fixed. +- If the issues just happen to be in the same feature area, or describe similar-sounding problems + with different specifics (different error messages, different triggers, different platforms, different + configurations), they are NOT duplicates. For each potential duplicate, assess confidence: -- "high": Very likely the same issue (same root cause, same symptoms) -- "medium": Possibly related (likely to be the same root cause) -- Do NOT include tangentially related issues (same general area but probably different issues) +- "high": Almost certainly the same bug. You can name a specific shared root cause, and the + reproduction steps / error messages / triggers are consistent. +- "medium": Likely the same bug based on specific technical details, but some uncertainty remains. +- Do NOT include issues that merely share symptoms, affect the same feature area, or sound similar + at a surface level. + +Examples of things that are NOT duplicates: +- Two issues about "Copilot models not showing" — one caused by a Zed update breaking the model list, + the other caused by the user's plan not including those models. +- Two issues about "Zed hangs" — one triggered by network drives, the other by large projects. +- Two issues about "can't sign in" — one caused by a missing system package, the other by a server-side error. 
Output only valid JSON (no markdown code blocks) with this structure: { @@ -427,13 +450,18 @@ Output only valid JSON (no markdown code blocks) with this structure: { "number": 12345, "confidence": "high|medium", - "explanation": "Brief explanation of why this might be a duplicate" + "shared_root_cause": "The specific bug/root cause shared by both issues", + "explanation": "Brief explanation with concrete evidence from both issues" } ], "summary": "One sentence summary of findings" } -Only include matches with "high" or "medium" confidence. Return empty matches array if none found.""" +When in doubt, return an empty matches array. A false positive (flagging a non-duplicate) is much +worse than a false negative (missing a real duplicate), because it wastes the time of both the +issue author and the maintainers. + +Return empty matches array if none found or if you can only identify shared symptoms.""" user_content = f"""## New Issue #{issue['number']} **Title:** {issue['title']} diff --git a/script/github-track-duplicate-bot-effectiveness.py b/script/github-track-duplicate-bot-effectiveness.py index a3056e856da717783ca0fc6538a131bb8a1d1d73..05e64026d9538606927da2c7e5cfbf211eb42d2e 100644 --- a/script/github-track-duplicate-bot-effectiveness.py +++ b/script/github-track-duplicate-bot-effectiveness.py @@ -24,6 +24,7 @@ import functools import os import re import sys +from datetime import datetime, timezone import requests @@ -39,6 +40,22 @@ BOT_START_DATE = "2026-02-18" NEEDS_TRIAGE_LABEL = "state:needs triage" DEFAULT_PROJECT_NUMBER = 76 VALID_CLOSED_AS_VALUES = {"duplicate", "not_planned", "completed"} +# Add a new tuple when you deploy a new version of the bot that you want to +# keep track of (e.g. the prompt gets a rewrite or the model gets swapped). +# Newest first, please. The datetime is for the deployment time (merge to main). 
+BOT_VERSION_TIMELINE = [ + ("v2", datetime(2026, 2, 26, 14, 9, tzinfo=timezone.utc)), + ("v1", datetime(2026, 2, 18, tzinfo=timezone.utc)), +] + + +def bot_version_for_time(date_string): + """Return the bot version that was active at the given ISO 8601 timestamp.""" + timestamp = datetime.fromisoformat(date_string.replace("Z", "+00:00")) + for version, deployed in BOT_VERSION_TIMELINE: + if timestamp >= deployed: + return version + return BOT_VERSION_TIMELINE[-1][0] def github_api_get(path, params=None): @@ -75,13 +92,14 @@ def fetch_issue(issue_number): "node_id": data["node_id"], "author": (data.get("user") or {}).get("login", ""), "type_name": (data.get("type") or {}).get("name"), + "created_at": data.get("created_at", ""), } -def get_bot_duplicate_comment(issue_number): - """Get the bot's duplicate-detection comment body from an issue. +def get_bot_comment_with_time(issue_number): + """Get the bot's duplicate-detection comment and its timestamp from an issue. - Returns the comment body if found, else None. + Returns {"body": str, "created_at": str} if found, else None. """ comments_path = f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/comments" page = 1 @@ -90,7 +108,7 @@ def get_bot_duplicate_comment(issue_number): author = (comment.get("user") or {}).get("login", "") body = comment.get("body", "") if author == BOT_LOGIN and body.startswith(BOT_COMMENT_PREFIX): - return body + return {"body": body, "created_at": comment.get("created_at", "")} page += 1 return None @@ -100,8 +118,8 @@ def parse_suggested_issues(comment_body): return [int(match) for match in re.findall(r"^- #(\d+)", comment_body, re.MULTILINE)] -def github_api_graphql(query, variables=None): - """Execute a GitHub GraphQL query. Raises on errors.""" +def github_api_graphql(query, variables=None, partial_errors_ok=False): + """Execute a GitHub GraphQL query. 
Raises on errors unless partial_errors_ok is set.""" response = requests.post( GRAPHQL_URL, headers=GITHUB_HEADERS, @@ -110,43 +128,51 @@ def github_api_graphql(query, variables=None): response.raise_for_status() data = response.json() if "errors" in data: - raise RuntimeError(f"GraphQL errors: {data['errors']}") + if not partial_errors_ok or "data" not in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + print(f" GraphQL partial errors (ignored): {data['errors']}") return data["data"] -def get_closed_as_duplicate_of(issue_number): - """Get the issue number this issue was closed as a duplicate of. +def find_canonical_among(duplicate_number, candidates): + """Check if any candidate issue has duplicate_number marked as a duplicate. - Uses the timeline to find the most recent MarkedAsDuplicateEvent. - Returns the original issue number, or None. + The MarkedAsDuplicateEvent lives on the canonical issue's timeline, not the + duplicate's. So to find which canonical issue our duplicate was closed against, + we check each candidate's timeline for a MarkedAsDuplicateEvent whose + `duplicate` field matches our issue. - Note: not all "closed as duplicate" issues have a MarkedAsDuplicateEvent. - If the closer used the "Close as duplicate" button without separately - marking the duplicate relationship, no event is created and this returns - None. The caller handles this by flagging the item for manual review. + Returns the matching canonical issue number, or None. """ + if not candidates: + return None + data = github_api_graphql( """ - query($owner: String!, $repo: String!, $number: Int!) { + query($owner: String!, $repo: String!, $numbers: [Int!]!) { repository(owner: $owner, name: $repo) { - issue(number: $number) { - timelineItems(last: 10, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) { - nodes { - ... on MarkedAsDuplicateEvent { - canonical { ... 
on Issue { number } } - } - } - } - } + PLACEHOLDER } } - """, - {"owner": REPO_OWNER, "repo": REPO_NAME, "number": issue_number}, + """.replace("PLACEHOLDER", "\n ".join( + f'issue_{number}: issue(number: {number}) {{' + f' timelineItems(last: 50, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) {{' + f' nodes {{ ... on MarkedAsDuplicateEvent {{ duplicate {{ ... on Issue {{ number }} }} }} }} }} }}' + for number in candidates + )), + {"owner": REPO_OWNER, "repo": REPO_NAME, "numbers": list(candidates)}, + partial_errors_ok=True, ) - nodes = data["repository"]["issue"]["timelineItems"]["nodes"] - for node in reversed(nodes): - if original := (node.get("canonical") or {}).get("number"): - return original + + repo = data["repository"] + for candidate in candidates: + issue_data = repo.get(f"issue_{candidate}") + if not issue_data: + continue + for node in issue_data["timelineItems"]["nodes"]: + dup_number = (node.get("duplicate") or {}).get("number") + if dup_number == duplicate_number: + return candidate return None @@ -261,7 +287,7 @@ def set_field_value(item_id, field_name, value): ) -def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None): +def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None, bot_comment_time=None): """Add an issue to the project board (or update it if already there), setting field values.""" item_id = find_project_item(issue_node_id) if item_id: @@ -279,6 +305,9 @@ def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="A if notes: set_field_value(item_id, "Notes", notes) + if bot_comment_time: + set_field_value(item_id, "Bot version", bot_version_for_time(bot_comment_time)) + return item_id @@ -296,14 +325,14 @@ def classify_closed(issue_number, closer_login, state_reason): print(f" Skipping: author '{author}' is a staff member") return - bot_comment = get_bot_duplicate_comment(issue_number) + bot_comment = 
get_bot_comment_with_time(issue_number) bot_commented = bot_comment is not None print(f" Bot commented: {bot_commented}") closer_is_author = closer_login == author if bot_commented and closer_is_author: - classify_as_success(issue, state_reason) + classify_as_success(issue, bot_comment, state_reason) elif bot_commented and not closer_is_author: # Only authors, staff, and triagers can close issues, so # a non-author closer is always someone with elevated permissions. @@ -314,7 +343,7 @@ def classify_closed(issue_number, closer_login, state_reason): print(" Skipping: no bot comment and not closed as duplicate") -def classify_as_success(issue, state_reason): +def classify_as_success(issue, bot_comment, state_reason): """Author closed their own issue after the bot commented.""" if state_reason == "duplicate": status = "Auto-classified" @@ -334,6 +363,7 @@ def classify_as_success(issue, state_reason): closed_as=state_reason, status=status, notes=notes, + bot_comment_time=bot_comment["created_at"], ) @@ -350,46 +380,48 @@ def classify_non_author_closed(issue, bot_comment, state_reason): closed_as=state_reason, status="Needs review", notes=notes, + bot_comment_time=bot_comment["created_at"], ) def classify_as_assist(issue, bot_comment): """Staff member closed as duplicate after the bot commented. 
Check if the dup matches.""" - suggested = parse_suggested_issues(bot_comment) + suggested = parse_suggested_issues(bot_comment["body"]) + if not suggested: + print(" -> Assist, needs review (could not parse bot suggestions)") + add_or_update_project_item( + issue["node_id"], outcome="Assist", closed_as="duplicate", + status="Needs review", notes="Could not parse bot suggestions", + bot_comment_time=bot_comment["created_at"]) + return + original = None try: - original = get_closed_as_duplicate_of(issue["number"]) + original = find_canonical_among(issue["number"], suggested) except (requests.RequestException, RuntimeError) as error: - print(f" Warning: failed to get the original-for the duplicate issue: {error}") - - if original and suggested: - if original in suggested: - status = "Auto-classified" - notes = None - print(f" -> Assist (original #{original} matches bot suggestion)") - else: - status = "Needs review" - suggested_str = ", ".join(f"#{number}" for number in suggested) - notes = f"Bot suggested {suggested_str}; closed as dup of #{original}" - print(f" -> Possible Assist, needs review ({notes})") + print(f" Warning: failed to query candidate timelines: {error}") + + if original: + status = "Auto-classified" + notes = None + print(f" -> Assist (original #{original} matches bot suggestion)") else: - # couldn't determine original or no suggestions parsed status = "Needs review" - if not original: - notes = "Could not determine original issue from timeline" - else: - notes = f"Closed as dup of #{original}; could not parse bot suggestions" + suggested_str = ", ".join(f"#{number}" for number in suggested) + notes = f"Bot suggested {suggested_str}; none matched as canonical" print(f" -> Possible Assist, needs review ({notes})") add_or_update_project_item( - issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes) + issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes, + 
bot_comment_time=bot_comment["created_at"]) def classify_as_missed_opportunity(issue): """Issue closed as duplicate but the bot never commented.""" print(" -> Missed opportunity") add_or_update_project_item( - issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified") + issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified", + bot_comment_time=issue["created_at"]) def classify_open(): @@ -419,16 +451,18 @@ def classify_open(): f"type is {type_name}" if type_name not in ("Bug", "Crash") else f"author {author} is staff" if is_staff_member(author) else "already on the board" if find_project_item(node_id) - else "no bot duplicate comment found" if not get_bot_duplicate_comment(number) + else "no bot duplicate comment found" if not (bot_comment := get_bot_comment_with_time(number)) else None ) + if skip_reason: print(f" #{number}: skipping, {skip_reason}") skipped += 1 continue print(f" #{number}: adding as Noise") - add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified") + add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified", + bot_comment_time=bot_comment["created_at"]) added += 1 except Exception as error: # broad catch: one issue failing shouldn't stop the sweep print(f" #{number}: error processing issue, skipping: {error}") diff --git a/script/linux b/script/linux index c5c4ea9ab3856545bcff63bc6bdaed5f06b8e07c..706fa63b037e290cd7991d3adfa42fac0c0cfe25 100755 --- a/script/linux +++ b/script/linux @@ -27,13 +27,15 @@ if [[ -n $apt ]]; then g++ libasound2-dev libfontconfig-dev + libgit2-dev + libglib2.0-dev + libssl-dev + libva-dev + libvulkan1 libwayland-dev libx11-xcb-dev libxkbcommon-x11-dev - libssl-dev libzstd-dev - libvulkan1 - libgit2-dev make cmake clang @@ -55,11 +57,21 @@ if [[ -n $apt ]]; then elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|Linux Mint 21|.+22\.04)' /etc/os-release); then deps+=( mold libstdc++-12-dev ) elif 
(grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|Linux Mint 20|.+20\.04)' /etc/os-release); then - deps+=( libstdc++-10-dev ) + # Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20 + # support for building webrtc-sys (requires -std=c++20, lambdas in + # unevaluated contexts from clang 17+, and working std::ranges in the + # stdlib). clang-18 is available in focal-security/universe as an official + # backport, and libstdc++-11-dev from the ubuntu-toolchain-r PPA provides + # headers with working pointer_traits/contiguous_range. + # Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must + # use libstdc++ (not libc++) to avoid ABI mismatches at link time. + $maysudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + deps+=( clang-18 libstdc++-11-dev ) fi $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" + finalize exit 0 fi @@ -78,6 +90,8 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then cmake alsa-lib-devel fontconfig-devel + glib2-devel + libva-devel wayland-devel libxcb-devel libxkbcommon-x11-devel @@ -141,7 +155,9 @@ if [[ -n $zyp ]]; then cmake fontconfig-devel gcc + libva-devel gcc-c++ + glib2-devel git gzip jq @@ -175,6 +191,8 @@ if [[ -n $pacman ]]; then cmake alsa-lib fontconfig + glib2 + libva wayland libgit2 libxcb @@ -205,6 +223,8 @@ if [[ -n $xbps ]]; then gcc alsa-lib-devel fontconfig-devel + glib-devel + libva-devel libxcb-devel libxkbcommon-devel libzstd-devel @@ -226,11 +246,13 @@ if [[ -n $emerge ]]; then deps=( app-arch/zstd app-misc/jq + dev-libs/glib dev-libs/openssl dev-libs/wayland dev-util/cmake media-libs/alsa-lib media-libs/fontconfig + media-libs/libva media-libs/vulkan-loader x11-libs/libxcb x11-libs/libxkbcommon diff --git a/script/terms/terms.rtf b/script/terms/terms.rtf index f5fab23f4551fd0b3f8605209c3315eb470af224..cd01004c11ed10e58d3c17b1274cd499a1046c66 100644 --- a/script/terms/terms.rtf +++ b/script/terms/terms.rtf @@ -2,128 +2,194 @@ {\colortbl;\red255\green0\blue0;\red0\green0\blue255;} 
\widowctrl\hyphauto -{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Zed End User Terms\par} +{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Terms of Service\par} {\pard \ql \f0 \sa180 \li0 \fi0 \par} -{\pard \ql \f0 \sa180 \li0 \fi0 PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. ACCESS TO AND USE OF THE SOLUTION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. TERMS APPLICABLE TO THE EDITOR\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. 
License Grant\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. License Limitations\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Open Source Software\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: {\field{\*\fldinst{HYPERLINK "https://github.com/zed-industries/zed"}}{\fldrslt{\ul -https://github.com/zed-industries/zed -}}} - (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. TERMS APPLICABLE TO THE ZED SERVICE\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. 
Access to and Scope of Zed Service\par} -{\pard \ql \f0 \sa180 \li0 \fi0 If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. Restrictions\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Customer Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. 
You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.1. Customer Data Made Available to Zed\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein:\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.2. Usage Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To improve the Editor and understand how You use it, Zed optionally collects the following usage data:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file extensions of opened files;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab features and tools You use within the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab project statistics (e.g., number of files); and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab frameworks detected in Your projects\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. 
See {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/telemetry"}}{\fldrslt{\ul -https://zed.dev/docs/telemetry -}}} - for more.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.3. Crash Reports\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.4. User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \u8226 ? 
You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. Such data and information may include, but is not limited to any of the following:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file contents and associated metadata (e.g., filename, paths, size, timestamps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab source control history, comments and metadata (e.g., git history, commit messages);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab configuration data (e.g., settings, keymaps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab anything typed, pasted and/or displayed on screen while using the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (e)\tx360\tab derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (f)\tx360\tab metadata, code and other derivative works of the above returned by language servers and other local tooling; and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (g)\tx360\tab metadata, code and other derivative works of the above returned by services integrated with the Zed Editor\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-g collectively, "User Content").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5. Handling of User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.1. 
Zed Collaboration Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.2. Other Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.3. 
Zed AI Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the \u8220"Output\u8221"). Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.4. Improvement Feedback\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. 
You may opt-out of sharing Model Improvement Feedback at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information on Zed Edit Predictions please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the \u8220"Agent Improvement Feedback\u8221") with Zed, and Zed may use the same to improve the Agent Panel and related Output. Zed will only collect Agent Improvement Feedback when You elect to share the same.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information regarding the Agent Panel please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.4. Privacy Policy\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/privacy-policy"}}{\fldrslt{\ul -https://zed.dev/privacy-policy +{\pard \ql \f0 \sa180 \li0 \fi0 {\b Last Updated}: March 2, 2026\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Welcome, and thank you for your interest in Zed Industries, Inc. (\u8220"{\b Zed},\u8221" \u8220"{\b we},\u8221" or \u8220"{\b us}\u8221") and our website at {\field{\*\fldinst{HYPERLINK "https://www.zed.dev"}}{\fldrslt{\ul +www.zed.dev +}}} +, along with our downloadable Zed software (the \u8220"{\b Software}\u8221") and related subscription service (the \u8220"{\b Service}\u8221"). 
These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the following Terms carefully.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b By accessing or using the Service, you (\u8220"You\u8221" or \u8220"Customer\u8221")\u160 ?agree to these Terms of Service, the Data Processing Addendum (\u8220"DPA\u8221"), available upon request,\u160 ?and Zed\u8217's {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul +Privacy Policy +}}} + (collectively, the \u8220"Terms\u8221").}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If you are not eligible, or do not agree to the Terms, you may not access or use the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b ARBITRATION\u160 ?NOTICE}. Except for certain kinds of disputes described in Section\u160 ?15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING.\u160 ?ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. Overview\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Subject to these Terms, Zed will permit Customer to access and use Zed\u8217's AI-enabled software-as-a-service offering (the \u8220"{\b Service}\u8221"), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed\u8217's open source code editing software (\u8220"{\b Software}\u8221").\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. 
Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. Eligibility\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?must be at least 18 years old\u160 ?to use the Service. By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer\u8217's registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer\u8217's behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. Access Grant\par} +{\pard \ql \f0 \sa180 \li0 \fi0 During the Term, subject to Customer\u8217's compliance with the terms of the Terms, Customer may access and use the Service only for Customer\u8217's internal business purposes\u160 ?or for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed\u8217's usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (\u8220"{\b Documentation}\u8221"), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer\u160 ?agrees\u160 ?to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer\u8217's account as though such acts and omissions were Customer\u8217's own. 
Customer\u160 ?will promptly notify Zed if it becomes aware of any compromise to its Zed account.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Acceptable Use\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service uses technology provided by multiple third party AI subprocessors (the \u8220"AI Providers\u8221") including but not limited to: Anthropic, PBC (\u8220"Anthropic\u8221"), Google LLC (\u8220"Google\u8221"), LiveKit Incorporated, OpenAI, LLC (\u8220"OpenAI\u8221") etc., as may be updated from time to time. Customer may not use the Service in a manner that violates any applicable AI Provider policy which are listed on {\field{\*\fldinst{HYPERLINK "https://zed.dev/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +, including Anthropic\u8217's {\field{\*\fldinst{HYPERLINK "https://www.anthropic.com/legal/aup"}}{\fldrslt{\ul +Usage Policy +}}} +, Google Gemini\u8217's {\field{\*\fldinst{HYPERLINK "https://policies.google.com/terms/generative-ai/use-policy"}}{\fldrslt{\ul +Generative AI Prohibited Use Policy +}}} +, GitHub's {\field{\*\fldinst{HYPERLINK "https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +, LiveKit\u8217's {\field{\*\fldinst{HYPERLINK "https://livekit.io/legal/acceptable-use-policy"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +; OpenAI\u8217's {\field{\*\fldinst{HYPERLINK "https://openai.com/policies/usage-policies/"}}{\fldrslt{\ul +Usage Policies +}}} +\u160 ?or {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/sharing-publication/"}}{\fldrslt{\ul +Sharing and Publication Policy +}}} +; and {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/community-guidelines/"}}{\fldrslt{\ul +Community Guidelines +}}} +; each of which may be updated from time to time and are expressly incorporated by reference. 
Customer\u160 ?is solely responsible to check for updates to the applicable AI Provider policy from time to time.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.4. Restrictions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b)\u160 ?seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others;\u160 ?(f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, \u8220"{\b Laws}\u8221"), or the Terms.\u160 ?The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at {\field{\*\fldinst{HYPERLINK "/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +\u160 ?(as may be updated from time to time). 
Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("{\b Open Source Component}"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0.\u160 ?To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open\u160 ?source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. General Payment Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Accessing certain features and tiers of the Service requires Customer\u160 ?to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Price\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +\u160 ?up to date. Zed encourages Customer\u160 ?to check Zed\u8217's pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. 
Taxes\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed\u8217's income tax (\u8220"{\b Taxes}\u8221"). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Authorization\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer\u8217's submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer\u8217's account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer\u8217's credit card account prior to Customer\u8217's purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer\u8217's purchase.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.4. Subscription Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("{\b Subscription Service}"). The "{\b Subscription Billing Date}" is the date when Customer purchases its first subscription to the Service. 
The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "{\b Initial Subscription Period}"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "{\b Subscription Period}") unless Customer cancels the Subscription Service or Zed terminates it. If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed\u8217's pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +. Customer\u8217's\u160 ?account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer\u8217's account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer\u8217's account or that Customer otherwise provides to Zed. Customer\u160 ?may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at {\field{\*\fldinst{HYPERLINK "mailto:billing-support@zed.dev"}}{\fldrslt{\ul +billing-support@zed.dev +}}} +. {\b YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.5. 
Consumption Fees\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/models"}}{\fldrslt{\ul +https://zed.dev/docs/ai/models +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.6. Delinquent Accounts\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. 
Zed's Use of Customer Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer\u8217's designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, store, host, or otherwise process (\u8220"{\b Process}\u8221") any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, \u8220"{\b Customer Data}\u8221") solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer\u8217's designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Customer's Ownership of Output\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, \u8220"{\b Output}\u8221"), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. 
As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. {\b For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed\u8217's specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. Zed's Collection of Output Rating\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (\u8220"Output Rating\u8221"). For clarity, Customer\u8217's decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Telemetry\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may collect, generate, and Process\u160 ?information, including technical logs, metrics, and data and learnings, related to the Software and Service (\u8220"{\b Telemetry}\u8221") to improve and support the Services and for other lawful business purposes. 
Customer\u160 ?may configure the Software to opt out of the collection of certain Telemetry Processed\u160 ?locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed\u8217's servers. Zed may not disclose Telemetry to any third-party other than Zed\u8217's Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. {\b For avoidance of doubt, Telemetry expressly does not include Customer Data.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. Customer Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any \u8220"protected health information\u8221" or \u8220"PHI\u8221" as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer\u8217's breach of this Section. Zed disclaims any and all liability in connection with Customer Data.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. 
Suspension of Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may immediately suspend Customer\u8217's access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b)\u160 ?any payments required under the Terms are overdue by 30 days or more; (c)\u160 ?changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer\u8217's breach of the Terms risks harm to any of Zed\u8217's other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer\u8217's access to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. Data Sharing and Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.1. Collaboration Services\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (\u8220"{\b Collaboration Features}\u8221"). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data).\u160 ?Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.2. Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (\u8220"{\b Third-Party Platforms}\u8221"). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer\u8217's agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer\u8217's behalf.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. Disclaimers; No Warranties by Zed\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED \u8220"AS IS\u8221" AND \u8220"AS AVAILABLE\u8221". ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. 
ZED DOES NOT WARRANT THAT CUSTOMER\u8217'S\u160 ?USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED\u8217'S CONTROL.\u160 ?ZED IS\u160 ?NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER\u8217'S\u160 ?DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer\u160 ?acknowledges\u160 ?and agrees\u160 ?that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is\u160 ?responsible for independently evaluating the Output and any other information Customer\u160 ?receives\u160 ?from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION\u160 ?8 (DISCLAIMERS; NO WARRANTIES BY\u160 ?ZED)\u160 ?APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. Term, Termination, and Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.1. Term\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms are effective beginning when Customer\u160 ?accepts\u160 ?the Terms or first downloads, installs, accesses, or uses\u160 ?the Service, and ending when terminated as described in Section 9.2 (Termination).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.2. 
Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer\u160 ?violates\u160 ?any provision of these Terms, then Customer is\u160 ?not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer\u8217's\u160 ?account on the Service, or suspend or terminate Customer\u8217's\u160 ?access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer\u160 ?arising from such termination. Customer\u160 ?may terminate its\u160 ?account and these Terms at any time by contacting Zed\u160 ?at {\field{\*\fldinst{HYPERLINK "mailto:hi@zed.dev"}}{\fldrslt{\ul +hi@zed.dev +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.3. Effect of Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Upon termination of these Terms: a) Customer\u8217's\u160 ?license to access and use the Service will terminate and Customer\u160 ?must immediately cease all use of the Service; b) Customer\u160 ?will no longer be authorized to access its\u160 ?account or the Service; c) Customer\u160 ?must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s)\u160 ?2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2\u160 ?(Customer\u8217's Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15\u160 ?(Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer\u8217's account has been terminated for a breach of these Terms, then Customer is\u160 ?prohibited from creating a new account on the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.4. 
Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer\u160 ?for any change to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Ownership; Feedback\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed\u8217's products, services, or other technology (\u8220"{\b Feedback}\u8221"), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. 
Limitations of Liability\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF: \u160 ?THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. 
THE LIMITATIONS IN THIS SECTION\u160 ?11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 12. Indemnity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer\u8217's unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer\u8217's use of Output; (4) Customer\u8217's violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer\u8217's violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed\u8217's own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer\u8217's indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 13. Confidentiality\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.1. 
Definition\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \u8220"{\b Confidential Information}\u8221" means information disclosed to the receiving Party (\u8220"{\b Recipient}\u8221") under the Terms that is designated by the disclosing Party (\u8220"{\b Discloser}\u8221") as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed\u8217's Confidential\u160 ?Information\u160 ?includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.2. Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (\u8220"{\b Representatives}\u8221"), provided Recipient remains responsible for its respective Representatives\u8217' compliance with this Section\u160 ?13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.3. 
Exclusions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.4. Remedies\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Unauthorized use or disclosure of Confidential\u160 ?Information\u160 ?may cause substantial harm for which damages alone are an insufficient remedy. Discloser\u160 ?may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the \u160 ?necessity\u160 ?of posting a bond or proving actual damages.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.5. Required Disclosures\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 14. Publicity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party\u8217's prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer\u8217's parent company) for the purpose of identifying Customer as a licensee or customer on Zed\u8217's website or in other promotional materials. 
Zed will cease further use at Customer\u8217's written request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 15. Governing Law, Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.1. Governing Law, Jurisdiction and Venue\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waive any objection based on venue or forum non conveniens. ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES, OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.2. Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b a. 
Opt-Out.} If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 15.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to {\field{\*\fldinst{HYPERLINK "mailto:arbitration-opt-out@zed.dev"}}{\fldrslt{\ul
+arbitration-opt-out@zed.dev
+}}}
+\u160 ?or a letter to Zed Industries, Inc., Attention: Legal Department \u8211- Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer\u8217's full legal name, the email address associated with Customer\u8217's account on the Service, and a statement that Customer wishes to opt out of arbitration (\u8220"{\b Opt-Out Notice}\u8221"). Once Zed receives Customer\u8217's Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer\u8217's Opt-Out Notice.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b b. Pre-Arbitration Dispute Resolution and Notification.} Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at\u160 ?{\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul
+legal@zed.dev
+}}}
+. If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration.\par}
+{\pard \ql \f0 \sa180 \li0 \fi0 {\b c. 
Exceptions to Arbitration.} Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party\u8217's copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b d. Arbitration Rules.} (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof.\u160 ?(2)\u160 ?If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b e. Modification to AAA Rules - Arbitration Hearing/Location.} Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer\u8217's headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.3. 
Waiver of Jury Trial and Class Action Waiver\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 16. General Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer\u160 ?may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed\u8217's prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed\u8217's right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section\u160 ?headers in these Terms are for convenience only and will not have any impact on the interpretation of any provision. 
Throughout these Terms the use of the word \u8220"including\u8221" means \u8220"including but not limited to.\u8221" If any part of these Terms are held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.2. Notices\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev +}}} +\u160 ?and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer\u160 ?by email or through the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.3. DPA\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The terms of the Data Processing Agreement (\u8220"{\b DPA}\u8221"), available upon request, are incorporated into these Terms by reference.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.4. Modification of Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer\u160 ?accept modified Terms in order to continue to use the Service. 
If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.5. Subcontractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may use subcontractors and permit them to exercise Zed\u8217's rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.6. Independent Contractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Parties are independent contractors, not agents, partners, or joint venturers.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.7. Export\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. government embargo or that has been designated by the U.S. government as a \u8220"terrorist supporting\u8221" country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.8. Government End-Users\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Elements of the Service may include commercial computer software. 
If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.9. Privacy Policy\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul +Zed Privacy Policy +}}} + (the \u8220"{\b Privacy Policy}\u8221") carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.10. Additional Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the \u8220"{\b Additional Terms}\u8221"). All Additional Terms are incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.11. Consent to Electronic Communications\par} +{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed\u8217's electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer\u160 ?may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.12. Contact Information\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service is offered by Zed Industries, Inc. Customer\u160 ?may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev }}} .\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. Fee Based Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed AI Services is made available with additional usage benefits (the \u8220"Enhanced Use \u8221") as described in the table published at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul -zed.dev/pricing -}}} - (the \u8220"Pricing Table\u8221"), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Fees\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the \u8220"Fees\u8221"). Customer shall have no right of return, and all Fees shall be non-refundable.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. 
Payment Terms\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Taxes; Set-offs\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. TERM AND TERMINATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.1. Term\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.2. Termination\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. 
You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.3. Effect of Termination and Survival\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. OWNERSHIP\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the \u8220"Output\u8221") are transferred or assigned to Zed hereunder.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. 
INDEMNIFICATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. WARRANTY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. LIMITATIONS OF LIABILITY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. 
ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Third Party Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/third-party-terms"}}{\fldrslt{\ul -https://zed.dev/third-party-terms -}}} - and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. MISCELLANEOUS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1. Export Control\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2. Compliance with Laws\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. 
You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3. Assignment\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.4. Force Majeure\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may to terminate this agreement by written notice to the other party.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.5. Notice\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.6. No Agency\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. 
You do not have any authority of any kind to bind Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.7. Governing Law\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.8. Updated Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.9. 
Entire Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 {\b DATE: May 6, 2025}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.13. Notice to California Residents\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer is a California resident, then under California Civil Code Section\u160 ?1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. 
Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service.\par} } diff --git a/tooling/xtask/Cargo.toml b/tooling/xtask/Cargo.toml index 13179b2eb69ba9a63ba6be5784907b78bba1b9f2..21090d1304ea0eab9ad70808b91f76789f2fd923 100644 --- a/tooling/xtask/Cargo.toml +++ b/tooling/xtask/Cargo.toml @@ -9,6 +9,7 @@ license = "GPL-3.0-or-later" workspace = true [dependencies] +annotate-snippets = "0.12.1" anyhow.workspace = true backtrace.workspace = true cargo_metadata.workspace = true @@ -17,7 +18,11 @@ clap = { workspace = true, features = ["derive"] } toml.workspace = true indoc.workspace = true indexmap.workspace = true +itertools.workspace = true +regex.workspace = true serde.workspace = true serde_json.workspace = true +serde_yaml = "0.9.34" +strum.workspace = true toml_edit.workspace = true gh-workflow.workspace = true diff --git a/tooling/xtask/src/main.rs b/tooling/xtask/src/main.rs index 6f83927d6730cb2f846d001a9bbbdd010589d998..05afe3c766829137a7c2ba6e73d57638624d5e6a 100644 --- a/tooling/xtask/src/main.rs +++ b/tooling/xtask/src/main.rs @@ -20,7 +20,10 @@ enum CliCommand { PackageConformity(tasks::package_conformity::PackageConformityArgs), /// Publishes GPUI and its dependencies to crates.io. PublishGpui(tasks::publish_gpui::PublishGpuiArgs), + /// Builds GPUI web examples and serves them. 
+ WebExamples(tasks::web_examples::WebExamplesArgs), Workflows(tasks::workflows::GenerateWorkflowArgs), + CheckWorkflows(tasks::workflow_checks::WorkflowValidationArgs), } fn main() -> Result<()> { @@ -33,6 +36,8 @@ fn main() -> Result<()> { tasks::package_conformity::run_package_conformity(args) } CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args), + CliCommand::WebExamples(args) => tasks::web_examples::run_web_examples(args), CliCommand::Workflows(args) => tasks::workflows::run_workflows(args), + CliCommand::CheckWorkflows(args) => tasks::workflow_checks::validate(args), } } diff --git a/tooling/xtask/src/tasks.rs b/tooling/xtask/src/tasks.rs index 01b3907f0486854b1bd18a5a3d21930b16670bd4..80f504fa0345de0d5bc71c5b44c71846f04c50bc 100644 --- a/tooling/xtask/src/tasks.rs +++ b/tooling/xtask/src/tasks.rs @@ -2,4 +2,6 @@ pub mod clippy; pub mod licenses; pub mod package_conformity; pub mod publish_gpui; +pub mod web_examples; +pub mod workflow_checks; pub mod workflows; diff --git a/tooling/xtask/src/tasks/web_examples.rs b/tooling/xtask/src/tasks/web_examples.rs new file mode 100644 index 0000000000000000000000000000000000000000..5b8e0fdd610e39a8ee020eddfbc9b98d00bdf419 --- /dev/null +++ b/tooling/xtask/src/tasks/web_examples.rs @@ -0,0 +1,338 @@ +#![allow(clippy::disallowed_methods, reason = "tooling is exempt")] + +use std::io::Write; +use std::path::Path; +use std::process::Command; + +use anyhow::{Context as _, Result, bail}; +use clap::Parser; + +#[derive(Parser)] +pub struct WebExamplesArgs { + #[arg(long)] + pub release: bool, + #[arg(long, default_value = "8080")] + pub port: u16, + #[arg(long)] + pub no_serve: bool, +} + +fn check_program(binary: &str, install_hint: &str) -> Result<()> { + match Command::new(binary).arg("--version").output() { + Ok(output) if output.status.success() => Ok(()), + _ => bail!("`{binary}` not found. 
Install with: {install_hint}"),
+    }
+}
+
+fn discover_examples() -> Result<Vec<String>> {
+    let examples_dir = Path::new("crates/gpui/examples");
+    let mut names = Vec::new();
+
+    for entry in std::fs::read_dir(examples_dir).context("failed to read crates/gpui/examples")? {
+        let path = entry?.path();
+        if path.extension().and_then(|e| e.to_str()) == Some("rs") {
+            if let Some(stem) = path.file_stem().and_then(|s| s.to_str()) {
+                names.push(stem.to_string());
+            }
+        }
+    }
+
+    if names.is_empty() {
+        bail!("no examples found in crates/gpui/examples");
+    }
+
+    names.sort();
+    Ok(names)
+}
+
+pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> {
+    let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
+    let profile = if args.release { "release" } else { "debug" };
+    let out_dir = "target/web-examples";
+
+    check_program("wasm-bindgen", "cargo install wasm-bindgen-cli")?;
+
+    let examples = discover_examples()?;
+    eprintln!(
+        "Building {} example(s) for wasm32-unknown-unknown ({profile})...\n",
+        examples.len()
+    );
+
+    std::fs::create_dir_all(out_dir).context("failed to create output directory")?;
+
+    eprintln!("Building all examples...");
+
+    let mut cmd = Command::new(&cargo);
+    cmd.args([
+        "build",
+        "--target",
+        "wasm32-unknown-unknown",
+        "-p",
+        "gpui",
+        "--keep-going",
+    ]);
+    // 🙈
+    cmd.env("RUSTC_BOOTSTRAP", "1");
+    for name in &examples {
+        cmd.args(["--example", name]);
+    }
+    if args.release {
+        cmd.arg("--release");
+    }
+
+    let _ = cmd.status().context("failed to run cargo build")?;
+
+    // Run wasm-bindgen on each .wasm that was produced. 
+ let mut succeeded: Vec = Vec::new(); + let mut failed: Vec = Vec::new(); + + for name in &examples { + let wasm_path = format!("target/wasm32-unknown-unknown/{profile}/examples/{name}.wasm"); + if !Path::new(&wasm_path).exists() { + eprintln!("[{name}] SKIPPED (build failed)"); + failed.push(name.clone()); + continue; + } + + eprintln!("[{name}] Running wasm-bindgen..."); + + let example_dir = format!("{out_dir}/{name}"); + std::fs::create_dir_all(&example_dir) + .with_context(|| format!("failed to create {example_dir}"))?; + + let status = Command::new("wasm-bindgen") + .args([ + &wasm_path, + "--target", + "web", + "--no-typescript", + "--out-dir", + &example_dir, + "--out-name", + name, + ]) + // 🙈 + .env("RUSTC_BOOTSTRAP", "1") + .status() + .context("failed to run wasm-bindgen")?; + if !status.success() { + eprintln!("[{name}] SKIPPED (wasm-bindgen failed)"); + failed.push(name.clone()); + continue; + } + + // Write per-example index.html. + let html_path = format!("{example_dir}/index.html"); + std::fs::File::create(&html_path) + .and_then(|mut file| file.write_all(make_example_html(name).as_bytes())) + .with_context(|| format!("failed to write {html_path}"))?; + + eprintln!("[{name}] OK"); + succeeded.push(name.clone()); + } + + if succeeded.is_empty() { + bail!("all {} examples failed to build", examples.len()); + } + + let example_names: Vec<&str> = succeeded.iter().map(|s| s.as_str()).collect(); + let index_path = format!("{out_dir}/index.html"); + std::fs::File::create(&index_path) + .and_then(|mut file| file.write_all(make_gallery_html(&example_names).as_bytes())) + .context("failed to write index.html")?; + + if args.no_serve { + return Ok(()); + } + + // Serve with COEP/COOP headers required for WebGPU / SharedArrayBuffer. 
+ eprintln!("Serving on http://127.0.0.1:{}...", args.port); + + let server_script = format!( + r#" +import http.server +class Handler(http.server.SimpleHTTPRequestHandler): + def __init__(self, *args, **kwargs): + super().__init__(*args, directory="{out_dir}", **kwargs) + def end_headers(self): + self.send_header("Cross-Origin-Embedder-Policy", "require-corp") + self.send_header("Cross-Origin-Opener-Policy", "same-origin") + super().end_headers() +http.server.HTTPServer(("127.0.0.1", {port}), Handler).serve_forever() +"#, + port = args.port, + ); + + let status = Command::new("python3") + .args(["-c", &server_script]) + .status() + .context("failed to run python3 http server (is python3 installed?)")?; + if !status.success() { + bail!("python3 http server exited with: {status}"); + } + + Ok(()) +} + +fn make_example_html(name: &str) -> String { + format!( + r#" + + + + + GPUI Web: {name} + + + +
Loading {name}…
+ + + +"# + ) +} + +fn make_gallery_html(examples: &[&str]) -> String { + let mut buttons = String::new(); + for name in examples { + buttons.push_str(&format!( + " \n" + )); + } + + let first = examples.first().copied().unwrap_or("hello_web"); + + format!( + r##" + + + + + GPUI Web Examples + + + +
+ +
+
+ {first} + Open in new tab ↗ +
+ +
+
+ + + +"##, + count = examples.len(), + ) +} diff --git a/tooling/xtask/src/tasks/workflow_checks.rs b/tooling/xtask/src/tasks/workflow_checks.rs new file mode 100644 index 0000000000000000000000000000000000000000..d6be0299327ad2dd4b4a126a61a8b2ae6ddb9fd3 --- /dev/null +++ b/tooling/xtask/src/tasks/workflow_checks.rs @@ -0,0 +1,118 @@ +mod check_run_patterns; + +use std::{fs, path::PathBuf}; + +use annotate_snippets::Renderer; +use anyhow::{Result, anyhow}; +use clap::Parser; +use itertools::{Either, Itertools}; +use serde_yaml::Value; +use strum::IntoEnumIterator; + +use crate::tasks::{ + workflow_checks::check_run_patterns::{ + RunValidationError, WorkflowFile, WorkflowValidationError, + }, + workflows::WorkflowType, +}; + +pub use check_run_patterns::validate_run_command; + +#[derive(Default, Parser)] +pub struct WorkflowValidationArgs {} + +pub fn validate(_: WorkflowValidationArgs) -> Result<()> { + let (parsing_errors, file_errors): (Vec<_>, Vec<_>) = get_all_workflow_files() + .map(check_workflow) + .flat_map(Result::err) + .partition_map(|error| match error { + WorkflowError::ParseError(error) => Either::Left(error), + WorkflowError::ValidationError(error) => Either::Right(error), + }); + + if !parsing_errors.is_empty() { + Err(anyhow!( + "Failed to read or parse some workflow files: {}", + parsing_errors.into_iter().join("\n") + )) + } else if !file_errors.is_empty() { + let errors: Vec<_> = file_errors + .iter() + .map(|error| error.annotation_group()) + .collect(); + + let renderer = + Renderer::styled().decor_style(annotate_snippets::renderer::DecorStyle::Ascii); + println!("{}", renderer.render(errors.as_slice())); + + Err(anyhow!("Workflow checks failed!")) + } else { + Ok(()) + } +} + +enum WorkflowError { + ParseError(anyhow::Error), + ValidationError(Box), +} + +fn get_all_workflow_files() -> impl Iterator { + WorkflowType::iter() + .map(|workflow_type| workflow_type.folder_path()) + .flat_map(|folder_path| { + 
fs::read_dir(folder_path).into_iter().flat_map(|entries| { + entries + .flat_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| { + path.extension() + .is_some_and(|ext| ext == "yaml" || ext == "yml") + }) + }) + }) +} + +fn check_workflow(workflow_file_path: PathBuf) -> Result<(), WorkflowError> { + fn collect_errors( + iter: impl Iterator>>, + ) -> Result<(), Vec> { + Some(iter.flat_map(Result::err).flatten().collect::>()) + .filter(|errors| !errors.is_empty()) + .map_or(Ok(()), Err) + } + + fn check_recursive(key: &Value, value: &Value) -> Result<(), Vec> { + match value { + Value::Mapping(mapping) => collect_errors( + mapping + .into_iter() + .map(|(key, value)| check_recursive(key, value)), + ), + Value::Sequence(sequence) => collect_errors( + sequence + .into_iter() + .map(|value| check_recursive(key, value)), + ), + Value::String(string) => check_string(key, string).map_err(|error| vec![error]), + Value::Null | Value::Bool(_) | Value::Number(_) | Value::Tagged(_) => Ok(()), + } + } + + let file_content = + WorkflowFile::load(&workflow_file_path).map_err(WorkflowError::ParseError)?; + + check_recursive(&Value::Null, &file_content.parsed_content).map_err(|errors| { + WorkflowError::ValidationError(Box::new(WorkflowValidationError::new( + errors, + file_content, + workflow_file_path, + ))) + }) +} + +fn check_string(key: &Value, value: &str) -> Result<(), RunValidationError> { + match key { + Value::String(key) if key == "run" => validate_run_command(value), + _ => Ok(()), + } +} diff --git a/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs b/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs new file mode 100644 index 0000000000000000000000000000000000000000..50c435d033336dd82d2f110f5c880dff0d677e52 --- /dev/null +++ b/tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs @@ -0,0 +1,124 @@ +use annotate_snippets::{AnnotationKind, Group, Level, Snippet}; +use anyhow::{Result, anyhow}; +use regex::Regex; +use 
serde_yaml::Value; +use std::{ + collections::HashMap, + fs, + ops::Range, + path::{Path, PathBuf}, + sync::LazyLock, +}; + +static GITHUB_INPUT_PATTERN: LazyLock = LazyLock::new(|| { + Regex::new(r#"\$\{\{[[:blank:]]*([[:alnum:]]|[[:punct:]])+?[[:blank:]]*\}\}"#) + .expect("Should compile") +}); + +pub struct WorkflowFile { + raw_content: String, + pub parsed_content: Value, +} + +impl WorkflowFile { + pub fn load(workflow_file_path: &Path) -> Result { + fs::read_to_string(workflow_file_path) + .map_err(|_| { + anyhow!( + "Could not read workflow file at {}", + workflow_file_path.display() + ) + }) + .and_then(|file_content| { + serde_yaml::from_str(&file_content) + .map(|parsed_content| Self { + raw_content: file_content, + parsed_content, + }) + .map_err(|e| anyhow!("Failed to parse workflow file: {e:?}")) + }) + } +} + +pub struct WorkflowValidationError { + file_path: PathBuf, + contents: WorkflowFile, + errors: Vec, +} + +impl WorkflowValidationError { + pub fn new( + errors: Vec, + contents: WorkflowFile, + file_path: PathBuf, + ) -> Self { + Self { + file_path, + contents, + errors, + } + } + + pub fn annotation_group<'a>(&'a self) -> Group<'a> { + let raw_content = &self.contents.raw_content; + let mut identical_lines = HashMap::new(); + + let ranges = self + .errors + .iter() + .flat_map(|error| error.found_injection_patterns.iter()) + .map(|(line, pattern_range)| { + let initial_offset = identical_lines + .get(&(line.as_str(), pattern_range.start)) + .copied() + .unwrap_or_default(); + + let line_start = raw_content[initial_offset..] 
+ .find(line.as_str()) + .map(|offset| offset + initial_offset) + .unwrap_or_default(); + + let pattern_start = line_start + pattern_range.start; + let pattern_end = pattern_start + pattern_range.len(); + + identical_lines.insert((line.as_str(), pattern_range.start), pattern_end); + + pattern_start..pattern_end + }); + + Level::ERROR + .primary_title("Found GitHub input injection in run command") + .element( + Snippet::source(&self.contents.raw_content) + .path(self.file_path.display().to_string()) + .annotations(ranges.map(|range| { + AnnotationKind::Primary + .span(range) + .label("This should be passed via an environment variable") + })), + ) + } +} + +pub struct RunValidationError { + found_injection_patterns: Vec<(String, Range)>, +} + +pub fn validate_run_command(command: &str) -> Result<(), RunValidationError> { + let patterns: Vec<_> = command + .lines() + .flat_map(move |line| { + GITHUB_INPUT_PATTERN + .find_iter(line) + .map(|m| (line.to_owned(), m.range())) + }) + .collect(); + + if patterns.is_empty() { + Ok(()) + } else { + Err(RunValidationError { + found_injection_patterns: patterns, + }) + } +} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 5663ebec247c4025f7cfbae8e9467733e2c7be2d..9151b9c671ef42e3dc54661f80438a4e31aff1e9 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -4,6 +4,8 @@ use gh_workflow::Workflow; use std::fs; use std::path::{Path, PathBuf}; +use crate::tasks::workflow_checks::{self}; + mod after_release; mod autofix_pr; mod bump_patch_version; @@ -87,8 +89,8 @@ impl WorkflowFile { } } -#[derive(PartialEq, Eq)] -enum WorkflowType { +#[derive(PartialEq, Eq, strum::EnumIter)] +pub enum WorkflowType { /// Workflows living in the Zed repository Zed, /// Workflows living in the `zed-extensions/workflows` repository that are @@ -113,7 +115,7 @@ impl WorkflowType { ) } - fn folder_path(&self) -> PathBuf { + pub fn folder_path(&self) -> PathBuf { 
match self { WorkflowType::Zed => PathBuf::from(".github/workflows"), WorkflowType::ExtensionCi => PathBuf::from("extensions/workflows"), @@ -155,5 +157,5 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { workflow_file.generate_file()?; } - Ok(()) + workflow_checks::validate(Default::default()) } diff --git a/tooling/xtask/src/tasks/workflows/after_release.rs b/tooling/xtask/src/tasks/workflows/after_release.rs index 3936e3ffb7754d167c6c39f02e17f758bed0c1ae..07ff1fba0d4799c463128362ad4ba996ccf8cea0 100644 --- a/tooling/xtask/src/tasks/workflows/after_release.rs +++ b/tooling/xtask/src/tasks/workflows/after_release.rs @@ -123,7 +123,7 @@ fn publish_winget() -> NamedJob { "X-GitHub-Api-Version" = "2022-11-28" } $body = @{ branch = "master" } | ConvertTo-Json - $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream" + $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream" try { Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json" Write-Host "Successfully synced winget-pkgs fork" diff --git a/tooling/xtask/src/tasks/workflows/autofix_pr.rs b/tooling/xtask/src/tasks/workflows/autofix_pr.rs index c2c89b7cd05394c225c015a6cc83f48bd35b24a4..2779dc2b01fa873bc050be4d873b9a5d502606bd 100644 --- a/tooling/xtask/src/tasks/workflows/autofix_pr.rs +++ b/tooling/xtask/src/tasks/workflows/autofix_pr.rs @@ -55,7 +55,8 @@ fn download_patch_artifact() -> Step { fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJob { fn checkout_pr(pr_number: &WorkflowInput) -> Step { - named::bash(&format!("gh pr checkout {pr_number}")) + named::bash(r#"gh pr checkout "$PR_NUMBER""#) + .add_env(("PR_NUMBER", pr_number.to_string())) .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)) } @@ -133,7 +134,9 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo fn commit_changes(pr_number: &WorkflowInput, 
autofix_job: &NamedJob) -> NamedJob { fn checkout_pr(pr_number: &WorkflowInput, token: &StepOutput) -> Step { - named::bash(&format!("gh pr checkout {pr_number}")).add_env(("GITHUB_TOKEN", token)) + named::bash(r#"gh pr checkout "$PR_NUMBER""#) + .add_env(("PR_NUMBER", pr_number.to_string())) + .add_env(("GITHUB_TOKEN", token)) } fn apply_patch() -> Step { diff --git a/tooling/xtask/src/tasks/workflows/cherry_pick.rs b/tooling/xtask/src/tasks/workflows/cherry_pick.rs index eaa786837f84ebf4d4f7e1a579db0c7b4dcc5040..5680bf6b23b85c17e68e531cecadfb31f091520d 100644 --- a/tooling/xtask/src/tasks/workflows/cherry_pick.rs +++ b/tooling/xtask/src/tasks/workflows/cherry_pick.rs @@ -35,7 +35,10 @@ fn run_cherry_pick( channel: &WorkflowInput, token: &StepOutput, ) -> Step { - named::bash(&format!("./script/cherry-pick {branch} {commit} {channel}")) + named::bash(r#"./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL""#) + .add_env(("BRANCH", branch.to_string())) + .add_env(("COMMIT", commit.to_string())) + .add_env(("CHANNEL", channel.to_string())) .add_env(("GIT_COMMITTER_NAME", "Zed Zippy")) .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev")) .add_env(("GITHUB_TOKEN", token)) diff --git a/tooling/xtask/src/tasks/workflows/compare_perf.rs b/tooling/xtask/src/tasks/workflows/compare_perf.rs index 1d111acc4f8a4dc47edea6f45c0b93c845b7cda2..74a1fbdc389e2b0dacdf579d9ee96a0366eb5c01 100644 --- a/tooling/xtask/src/tasks/workflows/compare_perf.rs +++ b/tooling/xtask/src/tasks/workflows/compare_perf.rs @@ -29,14 +29,16 @@ pub fn run_perf( crate_name: &WorkflowInput, ) -> NamedJob { fn cargo_perf_test(ref_name: &WorkflowInput, crate_name: &WorkflowInput) -> Step { - named::bash(&format!( - " - if [ -n \"{crate_name}\" ]; then - cargo perf-test -p {crate_name} -- --json={ref_name}; + named::bash( + r#" + if [ -n "$CRATE_NAME" ]; then + cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME"; else - cargo perf-test -p vim -- --json={ref_name}; - fi" - )) + cargo perf-test -p vim -- 
--json="$REF_NAME"; + fi"#, + ) + .add_env(("REF_NAME", ref_name.to_string())) + .add_env(("CRATE_NAME", crate_name.to_string())) } fn install_hyperfine() -> Step { @@ -44,9 +46,9 @@ pub fn run_perf( } fn compare_runs(head: &WorkflowInput, base: &WorkflowInput) -> Step { - named::bash(&format!( - "cargo perf-compare --save=results.md {base} {head}" - )) + named::bash(r#"cargo perf-compare --save=results.md "$BASE" "$HEAD""#) + .add_env(("BASE", base.to_string())) + .add_env(("HEAD", head.to_string())) } named::job( diff --git a/tooling/xtask/src/tasks/workflows/deploy_collab.rs b/tooling/xtask/src/tasks/workflows/deploy_collab.rs index 58212118c7ba4fa6d44d5f29fac671ca6eb5e662..300680f95b880e9adb14dffd2572d80cb08fd63c 100644 --- a/tooling/xtask/src/tasks/workflows/deploy_collab.rs +++ b/tooling/xtask/src/tasks/workflows/deploy_collab.rs @@ -1,5 +1,5 @@ use gh_workflow::{Container, Event, Port, Push, Run, Step, Use, Workflow}; -use indoc::{formatdoc, indoc}; +use indoc::indoc; use crate::tasks::workflows::runners::{self, Platform}; use crate::tasks::workflows::steps::{ @@ -115,9 +115,10 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { } fn sign_into_kubernetes() -> Step { - named::bash(formatdoc! 
{r#" - doctl kubernetes cluster kubeconfig save --expiry-seconds 600 {cluster_name} - "#, cluster_name = vars::CLUSTER_NAME}) + named::bash( + r#"doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME""#, + ) + .add_env(("CLUSTER_NAME", vars::CLUSTER_NAME)) } fn start_rollout() -> Step { @@ -139,7 +140,7 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE" source script/lib/deploy-helpers.sh - export_vars_for_environment $ZED_KUBE_NAMESPACE + export_vars_for_environment "$ZED_KUBE_NAMESPACE" ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)" export ZED_DO_CERTIFICATE_ID @@ -149,14 +150,14 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob { export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=850 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" export ZED_SERVICE_NAME=api export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT export DATABASE_MAX_CONNECTIONS=60 envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f - - kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch + kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}" "#}) } diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index bdc25f766e367042883ab7051676c7aa08873243..8c31de202ee7ac81b5f5e95fb26ec89452fd077c 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -150,8 +150,8 @@ pub(crate) fn compare_versions() -> 
(Step, StepOutput, StepOutput) { r#" CURRENT_VERSION="$({VERSION_CHECK})" - if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" @@ -191,7 +191,7 @@ fn bump_extension_version( let job = steps::dependant_job(dependencies) .cond(Expression::new(format!( - "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == 'true' || {version_changed} == 'false')", + "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == true || {version_changed} == 'false')", force_bump = force_bump_output.expr(), version_changed = version_changed_output.expr(), ))) @@ -258,8 +258,6 @@ fn install_bump_2_version() -> Step { fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step, StepOutput) { let step = named::bash(formatdoc! 
{r#" - OLD_VERSION="{current_version}" - BUMP_FILES=("extension.toml") if [[ -f "Cargo.toml" ]]; then BUMP_FILES+=("Cargo.toml") @@ -269,7 +267,7 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step --search "version = \"{{current_version}}"\" \ --replace "version = \"{{new_version}}"\" \ --current-version "$OLD_VERSION" \ - --no-configured-files {bump_type} "${{BUMP_FILES[@]}}" + --no-configured-files "$BUMP_TYPE" "${{BUMP_FILES[@]}}" if [[ -f "Cargo.toml" ]]; then cargo update --workspace @@ -280,7 +278,9 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step echo "new_version=${{NEW_VERSION}}" >> "$GITHUB_OUTPUT" "# }) - .id("bump-version"); + .id("bump-version") + .add_env(("OLD_VERSION", current_version.to_string())) + .add_env(("BUMP_TYPE", bump_type.to_string())); let new_version = StepOutput::new(&step, "new_version"); (step, new_version) diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index a650013bacfcfc1ac89a60ccfe8674a5621fb1c7..09f0cadf1c8731f8eed4ef1197a7edd05e0d1558 100644 --- a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -1,9 +1,11 @@ use gh_workflow::*; -use indoc::{formatdoc, indoc}; +use indoc::indoc; use crate::tasks::workflows::{ extension_bump::compare_versions, - run_tests::{orchestrate_without_package_filter, tests_pass}, + run_tests::{ + fetch_ts_query_ls, orchestrate_without_package_filter, run_ts_query_ls, tests_pass, + }, runners, steps::{ self, CommonJobConditions, FluentBuilder, NamedJob, cache_rust_dependencies_namespace, @@ -94,6 +96,8 @@ pub(crate) fn check_extension() -> NamedJob { .add_step(download_zed_extension_cli(cache_hit)) .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. 
.add_step(check()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()) .add_step(check_version_job) .add_step(verify_version_did_not_change(version_changed)); @@ -138,12 +142,14 @@ pub fn check() -> Step { } fn verify_version_did_not_change(version_changed: StepOutput) -> Step { - named::bash(formatdoc! {r#" - if [[ {version_changed} == "true" && "${{{{ github.event_name }}}}" == "pull_request" && "${{{{ github.event.pull_request.user.login }}}}" != "zed-zippy[bot]" ]] ; then + named::bash(indoc! {r#" + if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then echo "Version change detected in your change!" echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi "# }) + .add_env(("VERSION_CHANGED", version_changed.to_string())) + .add_env(("PR_USER_LOGIN", "${{ github.event.pull_request.user.login }}")) } diff --git a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs index 09e0b231ce79decb244b89a93cc8cf349623a290..6f03ad1521850fb24c5bad7265ebf913228c5077 100644 --- a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs +++ b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs @@ -105,10 +105,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { } fn get_removed_files(prev_commit: &StepOutput) -> (Step, StepOutput) { - let step = named::bash(formatdoc! {r#" - PREV_COMMIT="{prev_commit}" - - if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then + let step = named::bash(indoc::indoc! 
{r#" + if [ "$MATRIX_REPO" = "workflows" ]; then WORKFLOW_DIR="extensions/workflows" else WORKFLOW_DIR="extensions/workflows/shared" @@ -119,8 +117,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { # Get deleted files (status D) and renamed files (status R - old name needs removal) # Using -M to detect renames, then extracting files that are gone from their original location REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ - awk '/^D/ {{ print $2 }} /^R/ {{ print $2 }}' | \ - xargs -I{{}} basename {{}} 2>/dev/null | \ + awk '/^D/ { print $2 } /^R/ { print $2 }' | \ + xargs -I{} basename {} 2>/dev/null | \ tr '\n' ' ' || echo "") REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs) @@ -129,7 +127,9 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT" "#}) .id("calc-changes") - .working_directory("zed"); + .working_directory("zed") + .add_env(("PREV_COMMIT", prev_commit.to_string())) + .add_env(("MATRIX_REPO", "${{ matrix.repo }}")); let removed_files = StepOutput::new(&step, "removed_files"); @@ -137,9 +137,7 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { } fn sync_workflow_files(removed_files: &StepOutput) -> Step { - named::bash(formatdoc! {r#" - REMOVED_FILES="{removed_files}" - + named::bash(indoc::indoc! 
{r#" mkdir -p extension/.github/workflows cd extension/.github/workflows @@ -153,17 +151,19 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { cd - > /dev/null - if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then + if [ "$MATRIX_REPO" = "workflows" ]; then cp zed/extensions/workflows/*.yml extension/.github/workflows/ else cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/ fi "#}) + .add_env(("REMOVED_FILES", removed_files.to_string())) + .add_env(("MATRIX_REPO", "${{ matrix.repo }}")) } fn get_short_sha() -> (Step, StepOutput) { let step = named::bash(indoc::indoc! {r#" - echo "sha_short=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT" "#}) .id("short-sha") .working_directory("zed"); @@ -205,13 +205,16 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob { fn enable_auto_merge(token: &StepOutput) -> Step { named::bash(indoc::indoc! {r#" - PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}" if [ -n "$PR_NUMBER" ]; then cd extension gh pr merge "$PR_NUMBER" --auto --squash fi "#}) .add_env(("GH_TOKEN", token.to_string())) + .add_env(( + "PR_NUMBER", + "${{ steps.create-pr.outputs.pull-request-number }}", + )) } let (authenticate, token) = generate_token( diff --git a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs index d6fa479095b594707675e300ca3cda4514c544bf..2d82f1351f21645a77b1d13e158bd4142dbec069 100644 --- a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs +++ b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs @@ -57,7 +57,7 @@ pub(crate) fn call_bump_version( .with( Input::default() .add("bump-type", bump_type.to_string()) - .add("force-bump", true), + .add("force-bump", "${{ github.event_name != 'push' }}"), ) .with_app_secrets(); diff --git a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs 
b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs index 549b0fdfcfbb8f44b24ac849e2fe3c13bf5acdb0..2269201a2de383bc5ae7147d9e1d08105c540d15 100644 --- a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs +++ b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs @@ -28,7 +28,7 @@ fn publish_job() -> NamedJob { } fn upload_binary() -> Step { - named::bash("script/upload-extension-cli ${{ github.sha }}") + named::bash(r#"script/upload-extension-cli "$GITHUB_SHA""#) .add_env(( "DIGITALOCEAN_SPACES_ACCESS_KEY", vars::DIGITALOCEAN_SPACES_ACCESS_KEY, @@ -60,7 +60,7 @@ fn update_sha_in_zed(publish_job: &NamedJob) -> NamedJob { fn replace_sha() -> Step { named::bash(indoc! {r#" - sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \ tooling/xtask/src/tasks/workflows/extension_tests.rs "#}) } @@ -139,7 +139,7 @@ fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob { fn replace_sha() -> Step { named::bash(indoc! {r#" - sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \ + sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \ .github/workflows/ci.yml "#}) } @@ -191,7 +191,7 @@ fn create_pull_request_extensions( fn get_short_sha() -> (Step, StepOutput) { let step = named::bash(indoc::indoc! 
{r#" - echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT" + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" "#}) .id("short-sha"); diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 8241fc58f0821b950e32ee9b1a42473975ec008d..2963bbec24301b85b345461a6ea532a9ac3421c5 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -272,18 +272,55 @@ pub(crate) fn push_release_update_notification( test_jobs: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs, ) -> NamedJob { - let all_job_names = test_jobs - .into_iter() + fn env_name(name: &str) -> String { + format!("RESULT_{}", name.to_uppercase()) + } + + let all_job_names: Vec<&str> = test_jobs + .iter() .map(|j| j.name.as_ref()) - .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref())); + .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref())) + .collect(); + + let env_entries = [ + ( + "DRAFT_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", create_draft_release_job.name), + ), + ( + "UPLOAD_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", upload_assets_job.name), + ), + ( + "VALIDATE_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", validate_assets_job.name), + ), + ( + "AUTO_RELEASE_RESULT".into(), + format!("${{{{ needs.{}.result }}}}", auto_release_preview.name), + ), + ("RUN_URL".into(), CURRENT_ACTION_RUN_URL.to_string()), + ] + .into_iter() + .chain( + all_job_names + .iter() + .map(|name| (env_name(name), format!("${{{{ needs.{name}.result }}}}"))), + ); + + let failure_checks = all_job_names + .iter() + .map(|name| { + format!( + "if [ \"${env_name}\" == \"failure\" ];then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi", + env_name = env_name(name) + ) + }) + .collect::>() + .join("\n "); let notification_script = formatdoc! 
{r#" - DRAFT_RESULT="${{{{ needs.{draft_job}.result }}}}" - UPLOAD_RESULT="${{{{ needs.{upload_job}.result }}}}" - VALIDATE_RESULT="${{{{ needs.{validate_job}.result }}}}" - AUTO_RELEASE_RESULT="${{{{ needs.{auto_release_job}.result }}}}" TAG="$GITHUB_REF_NAME" - RUN_URL="{run_url}" if [ "$DRAFT_RESULT" == "failure" ]; then echo "❌ Draft release creation failed for $TAG: $RUN_URL" @@ -319,19 +356,6 @@ pub(crate) fn push_release_update_notification( fi fi "#, - draft_job = create_draft_release_job.name, - upload_job = upload_assets_job.name, - validate_job = validate_assets_job.name, - auto_release_job = auto_release_preview.name, - run_url = CURRENT_ACTION_RUN_URL, - failure_checks = all_job_names - .into_iter() - .map(|name: &str| format!( - "if [ \"${{{{ needs.{name}.result }}}}\" == \"failure\" ];\ - then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi" - )) - .collect::>() - .join("\n "), }; let mut all_deps: Vec<&NamedJob> = vec![ @@ -347,7 +371,10 @@ pub(crate) fn push_release_update_notification( .runs_on(runners::LINUX_SMALL) .cond(Expression::new("always()")); - for step in notify_slack(MessageType::Evaluated(notification_script)) { + for step in notify_slack(MessageType::Evaluated { + script: notification_script, + env: env_entries.collect(), + }) { job = job.add_step(step); } named::job(job) @@ -368,14 +395,17 @@ pub(crate) fn notify_on_failure(deps: &[&NamedJob]) -> NamedJob { pub(crate) enum MessageType { Static(String), - Evaluated(String), + Evaluated { + script: String, + env: Vec<(String, String)>, + }, } fn notify_slack(message: MessageType) -> Vec> { match message { MessageType::Static(message) => vec![send_slack_message(message)], - MessageType::Evaluated(expression) => { - let (generate_step, generated_message) = generate_slack_message(expression); + MessageType::Evaluated { script, env } => { + let (generate_step, generated_message) = generate_slack_message(script, env); vec![ generate_step, @@ -385,26 +415,32 @@ fn notify_slack(message: MessageType) 
-> Vec> { } } -fn generate_slack_message(expression: String) -> (Step, StepOutput) { +fn generate_slack_message( + expression: String, + env: Vec<(String, String)>, +) -> (Step, StepOutput) { let script = formatdoc! {r#" MESSAGE=$({expression}) echo "message=$MESSAGE" >> "$GITHUB_OUTPUT" "# }; - let generate_step = named::bash(&script) + let mut generate_step = named::bash(&script) .id("generate-webhook-message") .add_env(("GH_TOKEN", Context::github().token())); + for (name, value) in env { + generate_step = generate_step.add_env((name, value)); + } + let output = StepOutput::new(&generate_step, "message"); (generate_step, output) } fn send_slack_message(message: String) -> Step { - let script = formatdoc! {r#" - curl -X POST -H 'Content-type: application/json'\ - --data '{{"text":"{message}"}}' "$SLACK_WEBHOOK" - "# - }; - named::bash(&script).add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + named::bash( + r#"curl -X POST -H 'Content-type: application/json' --data "$(jq -n --arg text "$SLACK_MESSAGE" '{"text": $text}')" "$SLACK_WEBHOOK""# + ) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(("SLACK_MESSAGE", message)) } diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index e83d3a07f079c1f40360f413f3007813dbe552ce..521f419d9b317c42a1106ebe8500ccf0a3f494ec 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -123,7 +123,7 @@ fn cron_unit_evals() -> NamedJob { const UNIT_EVAL_MODELS: &[&str] = &[ "anthropic/claude-sonnet-4-5-latest", "anthropic/claude-opus-4-5-latest", - "google/gemini-3-pro", + "google/gemini-3.1-pro", "openai/gpt-5", ]; diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 7fa82e80c52b9e6faec6a377d906269e7a3dbb77..6b9d3b9e36c3ba3b3de4b02a53e83ee4faaa4785 100644 --- 
a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -146,6 +146,8 @@ pub(crate) fn bundle_linux( job: bundle_job(deps) .runs_on(arch.linux_bundler()) .envs(bundle_envs(platform)) + .add_env(Env::new("CC", "clang-18")) + .add_env(Env::new("CXX", "clang++-18")) .add_step(steps::checkout_repo()) .when_some(release_channel, |job, release_channel| { job.add_step(set_release_channel(platform, release_channel)) diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index c234f46f3dd2edc4bd861d7df46f966a1e623708..38ba1bd32945f9ba8ee1e08ebc994a1132fb07f2 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -3,9 +3,13 @@ use gh_workflow::{ Workflow, }; use indexmap::IndexMap; +use indoc::formatdoc; use crate::tasks::workflows::{ - steps::{CommonJobConditions, repository_owner_guard_expression}, + steps::{ + CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression, + use_clang, + }, vars::{self, PathCondition}, }; @@ -50,6 +54,7 @@ pub(crate) fn run_tests() -> Workflow { should_run_tests.guard(run_platform_tests(Platform::Mac)), should_run_tests.guard(doctests()), should_run_tests.guard(check_workspace_binaries()), + should_run_tests.guard(check_wasm()), should_run_tests.guard(check_dependencies()), // could be more specific here? 
should_check_docs.guard(check_docs()), should_check_licences.guard(check_licenses()), @@ -114,7 +119,7 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N git fetch origin "$GITHUB_BASE_REF" --depth=350 COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" check_pattern() { local output_name="$1" @@ -238,15 +243,20 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { "#}); + let env_entries: Vec<_> = jobs + .iter() + .map(|job| { + let env_name = format!("RESULT_{}", job.name.to_uppercase()); + let env_value = format!("${{{{ needs.{}.result }}}}", job.name); + (env_name, env_value) + }) + .collect(); + script.push_str( &jobs .iter() - .map(|job| { - format!( - "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"", - job.name, job.name - ) - }) + .zip(env_entries.iter()) + .map(|(job, (env_name, _))| format!("check_result \"{}\" \"${}\"", job.name, env_name)) .collect::>() .join("\n"), ); @@ -261,11 +271,43 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { .collect::>(), ) .cond(repository_owner_guard_expression(true)) - .add_step(named::bash(&script)); + .add_step( + env_entries + .into_iter() + .fold(named::bash(&script), |step, env_item| { + step.add_env(env_item) + }), + ); named::job(job) } +const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz"; +const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1"; + +pub(crate) fn fetch_ts_query_ls() -> Step { + named::uses( + "dsaltares", + "fetch-gh-release-asset", + "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c", + ) // v1.1.1 + .add_with(("repo", "ribru17/ts_query_ls")) + .add_with(("version", CI_TS_QUERY_RELEASE)) + .add_with(("file", TS_QUERY_LS_FILE)) +} + +pub(crate) fn run_ts_query_ls() -> Step { + named::bash(formatdoc!( + r#"tar -xf {TS_QUERY_LS_FILE} + ./ts_query_ls format --check . 
|| {{ + echo "Found unformatted queries, please format them with ts_query_ls." + echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + }}"# + )) +} + fn check_style() -> NamedJob { fn check_for_typos() -> Step { named::uses( @@ -275,6 +317,7 @@ fn check_style() -> NamedJob { ) // v1.40.0 .with(("config", "./typos.toml")) } + named::job( release_job(&[]) .runs_on(runners::LINUX_MEDIUM) @@ -285,7 +328,9 @@ fn check_style() -> NamedJob { .add_step(steps::cargo_fmt()) .add_step(steps::script("./script/check-todos")) .add_step(steps::script("./script/check-keymaps")) - .add_step(check_for_typos()), + .add_step(check_for_typos()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()), ) } @@ -323,7 +368,7 @@ fn check_dependencies() -> NamedJob { .with(("license-check", false)) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) @@ -332,11 +377,43 @@ fn check_dependencies() -> NamedJob { .add_step(run_cargo_machete()) .add_step(check_cargo_lock()) .add_step(check_vulnerable_dependencies()), + )) +} + +fn check_wasm() -> NamedJob { + fn install_nightly_wasm_toolchain() -> Step { + named::bash( + "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown", + ) + } + + fn cargo_check_wasm() -> Step { + named::bash(concat!( + "cargo +nightly -Zbuild-std=std,panic_abort ", + "check --target wasm32-unknown-unknown -p gpui_platform", + )) + .add_env(( + "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS", + "-C target-feature=+atomics,+bulk-memory,+mutable-globals", + )) + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_LARGE) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(install_nightly_wasm_toolchain()) + .add_step(steps::setup_sccache(Platform::Linux)) + .add_step(cargo_check_wasm()) + 
.add_step(steps::show_sccache_stats(Platform::Linux)) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), ) } fn check_workspace_binaries() -> NamedJob { - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -348,7 +425,7 @@ fn check_workspace_binaries() -> NamedJob { .add_step(steps::script("cargo build --workspace --bins --examples")) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } pub(crate) fn clippy(platform: Platform) -> NamedJob { @@ -357,23 +434,27 @@ pub(crate) fn clippy(platform: Platform) -> NamedJob { Platform::Linux => runners::LINUX_DEFAULT, Platform::Mac => runners::MAC_DEFAULT, }; + let mut job = release_job(&[]) + .runs_on(runner) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(platform)) + .when( + platform == Platform::Linux || platform == Platform::Mac, + |this| this.add_step(steps::cache_rust_dependencies_namespace()), + ) + .when( + platform == Platform::Linux, + steps::install_linux_dependencies, + ) + .add_step(steps::setup_sccache(platform)) + .add_step(steps::clippy(platform)) + .add_step(steps::show_sccache_stats(platform)); + if platform == Platform::Linux { + job = use_clang(job); + } NamedJob { name: format!("clippy_{platform}"), - job: release_job(&[]) - .runs_on(runner) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| this.add_step(steps::cache_rust_dependencies_namespace()), - ) - .when( - platform == Platform::Linux, - steps::install_linux_dependencies, - ) - .add_step(steps::setup_sccache(platform)) - .add_step(steps::clippy(platform)) - .add_step(steps::show_sccache_stats(platform)), + job, } } @@ -411,10 +492,12 @@ fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJo }) .add_step(steps::checkout_repo()) 
.add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| this.add_step(steps::cache_rust_dependencies_namespace()), - ) + .when(platform == Platform::Mac, |this| { + this.add_step(steps::cache_rust_dependencies_namespace()) + }) + .when(platform == Platform::Linux, |this| { + use_clang(this.add_step(steps::cache_rust_dependencies_namespace())) + }) .when( platform == Platform::Linux, steps::install_linux_dependencies, @@ -464,6 +547,14 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/")) } + fn buf_lint() -> Step { + named::bash("buf lint crates/proto/proto") + } + + fn check_protobuf_formatting() -> Step { + named::bash("buf format --diff --exit-code crates/proto/proto") + } + named::job( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) @@ -474,7 +565,9 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_step(steps::checkout_repo().with_full_history()) .add_step(ensure_fresh_merge()) .add_step(bufbuild_setup_action()) - .add_step(bufbuild_breaking_action()), + .add_step(bufbuild_breaking_action()) + .add_step(buf_lint()) + .add_step(check_protobuf_formatting()), ) } @@ -486,7 +579,7 @@ fn doctests() -> NamedJob { .id("run_doctests") } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) .add_step(steps::checkout_repo()) @@ -497,7 +590,7 @@ fn doctests() -> NamedJob { .add_step(run_doctests()) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } fn check_licenses() -> NamedJob { @@ -539,7 +632,7 @@ fn check_docs() -> NamedJob { "#}) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -556,7 +649,7 @@ fn check_docs() -> NamedJob { .add_step( 
lychee_link_check("target/deploy/docs"), // check links in generated html ), - ) + )) } pub(crate) fn check_scripts() -> NamedJob { @@ -567,9 +660,10 @@ pub(crate) fn check_scripts() -> NamedJob { } fn run_actionlint() -> Step { - named::bash(indoc::indoc! {r#" - ${{ steps.get_actionlint.outputs.executable }} -color - "#}) + named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env(( + "ACTIONLINT_BIN", + "${{ steps.get_actionlint.outputs.executable }}", + )) } fn run_shellcheck() -> Step { @@ -594,6 +688,7 @@ pub(crate) fn check_scripts() -> NamedJob { .add_step(run_shellcheck()) .add_step(download_actionlint().id("get_actionlint")) .add_step(run_actionlint()) + .add_step(cache_rust_dependencies_namespace()) .add_step(check_xtask_workflows()), ) } diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 8220d8311ff7ee0ee3a955dabacb067701bb8d51..4d17be81322277d0093de5d547bf4f0849e38dc3 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -3,6 +3,11 @@ use serde_json::Value; use crate::tasks::workflows::{runners::Platform, vars, vars::StepOutput}; +pub(crate) fn use_clang(job: Job) -> Job { + job.add_env(Env::new("CC", "clang")) + .add_env(Env::new("CXX", "clang++")) +} + const SCCACHE_R2_BUCKET: &str = "sccache-zed"; const BASH_SHELL: &str = "bash -euxo pipefail {0}"; @@ -498,9 +503,8 @@ pub mod named { } pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step { - named::bash(&format!( - "git fetch origin {ref_name} && git checkout {ref_name}" - )) + named::bash(r#"git fetch origin "$REF_NAME" && git checkout "$REF_NAME""#) + .add_env(("REF_NAME", ref_name.to_string())) } pub fn authenticate_as_zippy() -> (Step, StepOutput) { diff --git a/typos.toml b/typos.toml index 6f76cc75d25add39d841c07bbde82f93514adac5..863fea3822d62a51f737c3d7fa87a4c198710cfa 100644 --- a/typos.toml +++ b/typos.toml @@ -4,6 +4,9 @@ ignore-hidden = false extend-exclude = [ ".git/", + # 
Typewriter model names used for agent branch names aren't typos. + "crates/agent_ui/src/branch_names.rs", + # Contributor names aren't typos. ".mailmap", @@ -42,6 +45,8 @@ extend-exclude = [ "crates/gpui_windows/src/window.rs", # Some typos in the base mdBook CSS. "docs/theme/css/", + # Automatically generated JS. + "docs/theme/c15t@*.js", # Spellcheck triggers on `|Fixe[sd]|` regex part. "script/danger/dangerfile.ts", # Eval examples for prompts and criteria