1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::{Duration, Instant},
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id: Task<Option<()>>,
140 _maintain_online_status: Task<Option<()>>,
141 },
142 Remote {
143 sharing_has_stopped: bool,
144 remote_id: u64,
145 replica_id: ReplicaId,
146 _detect_unshare: Task<Option<()>>,
147 },
148}
149
150#[derive(Clone, Debug)]
151pub struct Collaborator {
152 pub user: Arc<User>,
153 pub peer_id: PeerId,
154 pub replica_id: ReplicaId,
155}
156
157#[derive(Clone, Debug, PartialEq, Eq)]
158pub enum Event {
159 ActiveEntryChanged(Option<ProjectEntryId>),
160 WorktreeAdded,
161 WorktreeRemoved(WorktreeId),
162 DiskBasedDiagnosticsStarted {
163 language_server_id: usize,
164 },
165 DiskBasedDiagnosticsFinished {
166 language_server_id: usize,
167 },
168 DiagnosticsUpdated {
169 path: ProjectPath,
170 language_server_id: usize,
171 },
172 RemoteIdChanged(Option<u64>),
173 CollaboratorLeft(PeerId),
174 ContactRequestedJoin(Arc<User>),
175 ContactCancelledJoinRequest(Arc<User>),
176}
177
178#[derive(Serialize)]
179pub struct LanguageServerStatus {
180 pub name: String,
181 pub pending_work: BTreeMap<String, LanguageServerProgress>,
182 pub has_pending_diagnostic_updates: bool,
183 progress_tokens: HashSet<String>,
184}
185
186#[derive(Clone, Debug, Serialize)]
187pub struct LanguageServerProgress {
188 pub message: Option<String>,
189 pub percentage: Option<usize>,
190 #[serde(skip_serializing)]
191 pub last_update_at: Instant,
192}
193
194#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
195pub struct ProjectPath {
196 pub worktree_id: WorktreeId,
197 pub path: Arc<Path>,
198}
199
200#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
201pub struct DiagnosticSummary {
202 pub language_server_id: usize,
203 pub error_count: usize,
204 pub warning_count: usize,
205}
206
207#[derive(Debug, Clone)]
208pub struct Location {
209 pub buffer: ModelHandle<Buffer>,
210 pub range: Range<language::Anchor>,
211}
212
213#[derive(Debug, Clone)]
214pub struct LocationLink {
215 pub origin: Option<Location>,
216 pub target: Location,
217}
218
219#[derive(Debug)]
220pub struct DocumentHighlight {
221 pub range: Range<language::Anchor>,
222 pub kind: DocumentHighlightKind,
223}
224
225#[derive(Clone, Debug)]
226pub struct Symbol {
227 pub source_worktree_id: WorktreeId,
228 pub worktree_id: WorktreeId,
229 pub language_server_name: LanguageServerName,
230 pub path: PathBuf,
231 pub label: CodeLabel,
232 pub name: String,
233 pub kind: lsp::SymbolKind,
234 pub range: Range<PointUtf16>,
235 pub signature: [u8; 32],
236}
237
238#[derive(Clone, Debug, PartialEq)]
239pub struct HoverBlock {
240 pub text: String,
241 pub language: Option<String>,
242}
243
244impl HoverBlock {
245 fn try_new(marked_string: MarkedString) -> Option<Self> {
246 let result = match marked_string {
247 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
248 text: value,
249 language: Some(language),
250 },
251 MarkedString::String(text) => HoverBlock {
252 text,
253 language: None,
254 },
255 };
256 if result.text.is_empty() {
257 None
258 } else {
259 Some(result)
260 }
261 }
262}
263
264#[derive(Debug)]
265pub struct Hover {
266 pub contents: Vec<HoverBlock>,
267 pub range: Option<Range<language::Anchor>>,
268}
269
270#[derive(Default)]
271pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
272
273impl DiagnosticSummary {
274 fn new<'a, T: 'a>(
275 language_server_id: usize,
276 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
277 ) -> Self {
278 let mut this = Self {
279 language_server_id,
280 error_count: 0,
281 warning_count: 0,
282 };
283
284 for entry in diagnostics {
285 if entry.diagnostic.is_primary {
286 match entry.diagnostic.severity {
287 DiagnosticSeverity::ERROR => this.error_count += 1,
288 DiagnosticSeverity::WARNING => this.warning_count += 1,
289 _ => {}
290 }
291 }
292 }
293
294 this
295 }
296
297 pub fn is_empty(&self) -> bool {
298 self.error_count == 0 && self.warning_count == 0
299 }
300
301 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
302 proto::DiagnosticSummary {
303 path: path.to_string_lossy().to_string(),
304 language_server_id: self.language_server_id as u64,
305 error_count: self.error_count as u32,
306 warning_count: self.warning_count as u32,
307 }
308 }
309}
310
311#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
312pub struct ProjectEntryId(usize);
313
314impl ProjectEntryId {
315 pub const MAX: Self = Self(usize::MAX);
316
317 pub fn new(counter: &AtomicUsize) -> Self {
318 Self(counter.fetch_add(1, SeqCst))
319 }
320
321 pub fn from_proto(id: u64) -> Self {
322 Self(id as usize)
323 }
324
325 pub fn to_proto(&self) -> u64 {
326 self.0 as u64
327 }
328
329 pub fn to_usize(&self) -> usize {
330 self.0
331 }
332}
333
334impl Project {
335 pub fn init(client: &Arc<Client>) {
336 client.add_model_message_handler(Self::handle_request_join_project);
337 client.add_model_message_handler(Self::handle_add_collaborator);
338 client.add_model_message_handler(Self::handle_buffer_reloaded);
339 client.add_model_message_handler(Self::handle_buffer_saved);
340 client.add_model_message_handler(Self::handle_start_language_server);
341 client.add_model_message_handler(Self::handle_update_language_server);
342 client.add_model_message_handler(Self::handle_remove_collaborator);
343 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
344 client.add_model_message_handler(Self::handle_update_project);
345 client.add_model_message_handler(Self::handle_unregister_project);
346 client.add_model_message_handler(Self::handle_project_unshared);
347 client.add_model_message_handler(Self::handle_update_buffer_file);
348 client.add_model_message_handler(Self::handle_update_buffer);
349 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
350 client.add_model_message_handler(Self::handle_update_worktree);
351 client.add_model_request_handler(Self::handle_create_project_entry);
352 client.add_model_request_handler(Self::handle_rename_project_entry);
353 client.add_model_request_handler(Self::handle_copy_project_entry);
354 client.add_model_request_handler(Self::handle_delete_project_entry);
355 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
356 client.add_model_request_handler(Self::handle_apply_code_action);
357 client.add_model_request_handler(Self::handle_reload_buffers);
358 client.add_model_request_handler(Self::handle_format_buffers);
359 client.add_model_request_handler(Self::handle_get_code_actions);
360 client.add_model_request_handler(Self::handle_get_completions);
361 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
362 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
363 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
364 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
365 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
366 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
367 client.add_model_request_handler(Self::handle_search_project);
368 client.add_model_request_handler(Self::handle_get_project_symbols);
369 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
370 client.add_model_request_handler(Self::handle_open_buffer_by_id);
371 client.add_model_request_handler(Self::handle_open_buffer_by_path);
372 client.add_model_request_handler(Self::handle_save_buffer);
373 }
374
375 pub fn local(
376 online: bool,
377 client: Arc<Client>,
378 user_store: ModelHandle<UserStore>,
379 project_store: ModelHandle<ProjectStore>,
380 languages: Arc<LanguageRegistry>,
381 fs: Arc<dyn Fs>,
382 cx: &mut MutableAppContext,
383 ) -> ModelHandle<Self> {
384 cx.add_model(|cx: &mut ModelContext<Self>| {
385 let (remote_id_tx, remote_id_rx) = watch::channel();
386 let _maintain_remote_id = cx.spawn_weak({
387 let mut status_rx = client.clone().status();
388 move |this, mut cx| async move {
389 while let Some(status) = status_rx.recv().await {
390 let this = this.upgrade(&cx)?;
391 if status.is_connected() {
392 this.update(&mut cx, |this, cx| this.register(cx))
393 .await
394 .log_err()?;
395 } else {
396 this.update(&mut cx, |this, cx| this.unregister(cx))
397 .await
398 .log_err();
399 }
400 }
401 None
402 }
403 });
404
405 let (online_tx, online_rx) = watch::channel_with(online);
406 let mut send_extension_counts = None;
407 let _maintain_online_status = cx.spawn_weak({
408 let mut online_rx = online_rx.clone();
409 move |this, mut cx| async move {
410 while let Some(online) = online_rx.recv().await {
411 let this = this.upgrade(&cx)?;
412 if online {
413 send_extension_counts = Some(
414 this.update(&mut cx, |this, cx| this.send_extension_counts(cx)),
415 );
416 } else {
417 send_extension_counts.take();
418 }
419
420 this.update(&mut cx, |this, cx| {
421 if !online {
422 this.unshared(cx);
423 }
424 this.metadata_changed(false, cx)
425 });
426 }
427 None
428 }
429 });
430
431 let handle = cx.weak_handle();
432 project_store.update(cx, |store, cx| store.add_project(handle, cx));
433
434 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
435 Self {
436 worktrees: Default::default(),
437 collaborators: Default::default(),
438 opened_buffers: Default::default(),
439 shared_buffers: Default::default(),
440 loading_buffers: Default::default(),
441 loading_local_worktrees: Default::default(),
442 buffer_snapshots: Default::default(),
443 client_state: ProjectClientState::Local {
444 is_shared: false,
445 remote_id_tx,
446 remote_id_rx,
447 online_tx,
448 online_rx,
449 _maintain_remote_id,
450 _maintain_online_status,
451 },
452 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
453 client_subscriptions: Vec::new(),
454 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
455 active_entry: None,
456 languages,
457 client,
458 user_store,
459 project_store,
460 fs,
461 next_entry_id: Default::default(),
462 next_diagnostic_group_id: Default::default(),
463 language_servers: Default::default(),
464 started_language_servers: Default::default(),
465 language_server_statuses: Default::default(),
466 last_workspace_edits_by_language_server: Default::default(),
467 language_server_settings: Default::default(),
468 next_language_server_id: 0,
469 nonce: StdRng::from_entropy().gen(),
470 initialized_persistent_state: false,
471 }
472 })
473 }
474
475 fn send_extension_counts(&self, cx: &mut ModelContext<Self>) -> Task<Option<()>> {
476 cx.spawn_weak(|this, cx| async move {
477 loop {
478 let this = this.upgrade(&cx)?;
479 this.read_with(&cx, |this, cx| {
480 if let Some(project_id) = this.remote_id() {
481 for worktree in this.visible_worktrees(cx) {
482 if let Some(worktree) = worktree.read(cx).as_local() {
483 let mut extensions = Vec::new();
484 let mut counts = Vec::new();
485
486 for (extension, count) in worktree.extension_counts() {
487 extensions.push(extension.to_string_lossy().to_string());
488 counts.push(*count as u32);
489 }
490
491 this.client
492 .send(proto::UpdateWorktreeExtensions {
493 project_id,
494 worktree_id: worktree.id().to_proto(),
495 extensions,
496 counts,
497 })
498 .log_err();
499 }
500 }
501 }
502 });
503
504 cx.background().timer(Duration::from_secs(60 * 5)).await;
505 }
506 })
507 }
508
509 pub async fn remote(
510 remote_id: u64,
511 client: Arc<Client>,
512 user_store: ModelHandle<UserStore>,
513 project_store: ModelHandle<ProjectStore>,
514 languages: Arc<LanguageRegistry>,
515 fs: Arc<dyn Fs>,
516 mut cx: AsyncAppContext,
517 ) -> Result<ModelHandle<Self>, JoinProjectError> {
518 client.authenticate_and_connect(true, &cx).await?;
519
520 let response = client
521 .request(proto::JoinProject {
522 project_id: remote_id,
523 })
524 .await?;
525
526 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
527 proto::join_project_response::Variant::Accept(response) => response,
528 proto::join_project_response::Variant::Decline(decline) => {
529 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
530 Some(proto::join_project_response::decline::Reason::Declined) => {
531 Err(JoinProjectError::HostDeclined)?
532 }
533 Some(proto::join_project_response::decline::Reason::Closed) => {
534 Err(JoinProjectError::HostClosedProject)?
535 }
536 Some(proto::join_project_response::decline::Reason::WentOffline) => {
537 Err(JoinProjectError::HostWentOffline)?
538 }
539 None => Err(anyhow!("missing decline reason"))?,
540 }
541 }
542 };
543
544 let replica_id = response.replica_id as ReplicaId;
545
546 let mut worktrees = Vec::new();
547 for worktree in response.worktrees {
548 let (worktree, load_task) = cx
549 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
550 worktrees.push(worktree);
551 load_task.detach();
552 }
553
554 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
555 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
556 let handle = cx.weak_handle();
557 project_store.update(cx, |store, cx| store.add_project(handle, cx));
558
559 let mut this = Self {
560 worktrees: Vec::new(),
561 loading_buffers: Default::default(),
562 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
563 shared_buffers: Default::default(),
564 loading_local_worktrees: Default::default(),
565 active_entry: None,
566 collaborators: Default::default(),
567 languages,
568 user_store: user_store.clone(),
569 project_store,
570 fs,
571 next_entry_id: Default::default(),
572 next_diagnostic_group_id: Default::default(),
573 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
574 _subscriptions: Default::default(),
575 client: client.clone(),
576 client_state: ProjectClientState::Remote {
577 sharing_has_stopped: false,
578 remote_id,
579 replica_id,
580 _detect_unshare: cx.spawn_weak(move |this, mut cx| {
581 async move {
582 let mut status = client.status();
583 let is_connected =
584 status.next().await.map_or(false, |s| s.is_connected());
585 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
586 if !is_connected || status.next().await.is_some() {
587 if let Some(this) = this.upgrade(&cx) {
588 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
589 }
590 }
591 Ok(())
592 }
593 .log_err()
594 }),
595 },
596 language_servers: Default::default(),
597 started_language_servers: Default::default(),
598 language_server_settings: Default::default(),
599 language_server_statuses: response
600 .language_servers
601 .into_iter()
602 .map(|server| {
603 (
604 server.id as usize,
605 LanguageServerStatus {
606 name: server.name,
607 pending_work: Default::default(),
608 has_pending_diagnostic_updates: false,
609 progress_tokens: Default::default(),
610 },
611 )
612 })
613 .collect(),
614 last_workspace_edits_by_language_server: Default::default(),
615 next_language_server_id: 0,
616 opened_buffers: Default::default(),
617 buffer_snapshots: Default::default(),
618 nonce: StdRng::from_entropy().gen(),
619 initialized_persistent_state: false,
620 };
621 for worktree in worktrees {
622 this.add_worktree(&worktree, cx);
623 }
624 this
625 });
626
627 let user_ids = response
628 .collaborators
629 .iter()
630 .map(|peer| peer.user_id)
631 .collect();
632 user_store
633 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
634 .await?;
635 let mut collaborators = HashMap::default();
636 for message in response.collaborators {
637 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
638 collaborators.insert(collaborator.peer_id, collaborator);
639 }
640
641 this.update(&mut cx, |this, _| {
642 this.collaborators = collaborators;
643 });
644
645 Ok(this)
646 }
647
648 #[cfg(any(test, feature = "test-support"))]
649 pub async fn test(
650 fs: Arc<dyn Fs>,
651 root_paths: impl IntoIterator<Item = &Path>,
652 cx: &mut gpui::TestAppContext,
653 ) -> ModelHandle<Project> {
654 if !cx.read(|cx| cx.has_global::<Settings>()) {
655 cx.update(|cx| cx.set_global(Settings::test(cx)));
656 }
657
658 let languages = Arc::new(LanguageRegistry::test());
659 let http_client = client::test::FakeHttpClient::with_404_response();
660 let client = client::Client::new(http_client.clone());
661 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
662 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
663 let project = cx.update(|cx| {
664 Project::local(true, client, user_store, project_store, languages, fs, cx)
665 });
666 for path in root_paths {
667 let (tree, _) = project
668 .update(cx, |project, cx| {
669 project.find_or_create_local_worktree(path, true, cx)
670 })
671 .await
672 .unwrap();
673 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
674 .await;
675 }
676 project
677 }
678
679 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
680 if self.is_remote() {
681 return Task::ready(Ok(()));
682 }
683
684 let db = self.project_store.read(cx).db.clone();
685 let keys = self.db_keys_for_online_state(cx);
686 let online_by_default = cx.global::<Settings>().projects_online_by_default;
687 let read_online = cx.background().spawn(async move {
688 let values = db.read(keys)?;
689 anyhow::Ok(
690 values
691 .into_iter()
692 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
693 )
694 });
695 cx.spawn(|this, mut cx| async move {
696 let online = read_online.await.log_err().unwrap_or(false);
697 this.update(&mut cx, |this, cx| {
698 this.initialized_persistent_state = true;
699 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
700 let mut online_tx = online_tx.borrow_mut();
701 if *online_tx != online {
702 *online_tx = online;
703 drop(online_tx);
704 this.metadata_changed(false, cx);
705 }
706 }
707 });
708 Ok(())
709 })
710 }
711
712 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
713 if self.is_remote() || !self.initialized_persistent_state {
714 return Task::ready(Ok(()));
715 }
716
717 let db = self.project_store.read(cx).db.clone();
718 let keys = self.db_keys_for_online_state(cx);
719 let is_online = self.is_online();
720 cx.background().spawn(async move {
721 let value = &[is_online as u8];
722 db.write(keys.into_iter().map(|key| (key, value)))
723 })
724 }
725
726 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
727 let settings = cx.global::<Settings>();
728
729 let mut language_servers_to_start = Vec::new();
730 for buffer in self.opened_buffers.values() {
731 if let Some(buffer) = buffer.upgrade(cx) {
732 let buffer = buffer.read(cx);
733 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
734 {
735 if settings.enable_language_server(Some(&language.name())) {
736 let worktree = file.worktree.read(cx);
737 language_servers_to_start.push((
738 worktree.id(),
739 worktree.as_local().unwrap().abs_path().clone(),
740 language.clone(),
741 ));
742 }
743 }
744 }
745 }
746
747 let mut language_servers_to_stop = Vec::new();
748 for language in self.languages.to_vec() {
749 if let Some(lsp_adapter) = language.lsp_adapter() {
750 if !settings.enable_language_server(Some(&language.name())) {
751 let lsp_name = lsp_adapter.name();
752 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
753 if lsp_name == *started_lsp_name {
754 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
755 }
756 }
757 }
758 }
759 }
760
761 // Stop all newly-disabled language servers.
762 for (worktree_id, adapter_name) in language_servers_to_stop {
763 self.stop_language_server(worktree_id, adapter_name, cx)
764 .detach();
765 }
766
767 // Start all the newly-enabled language servers.
768 for (worktree_id, worktree_path, language) in language_servers_to_start {
769 self.start_language_server(worktree_id, worktree_path, language, cx);
770 }
771
772 cx.notify();
773 }
774
775 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
776 self.opened_buffers
777 .get(&remote_id)
778 .and_then(|buffer| buffer.upgrade(cx))
779 }
780
781 pub fn languages(&self) -> &Arc<LanguageRegistry> {
782 &self.languages
783 }
784
785 pub fn client(&self) -> Arc<Client> {
786 self.client.clone()
787 }
788
789 pub fn user_store(&self) -> ModelHandle<UserStore> {
790 self.user_store.clone()
791 }
792
793 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
794 self.project_store.clone()
795 }
796
797 #[cfg(any(test, feature = "test-support"))]
798 pub fn check_invariants(&self, cx: &AppContext) {
799 if self.is_local() {
800 let mut worktree_root_paths = HashMap::default();
801 for worktree in self.worktrees(cx) {
802 let worktree = worktree.read(cx);
803 let abs_path = worktree.as_local().unwrap().abs_path().clone();
804 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
805 assert_eq!(
806 prev_worktree_id,
807 None,
808 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
809 abs_path,
810 worktree.id(),
811 prev_worktree_id
812 )
813 }
814 } else {
815 let replica_id = self.replica_id();
816 for buffer in self.opened_buffers.values() {
817 if let Some(buffer) = buffer.upgrade(cx) {
818 let buffer = buffer.read(cx);
819 assert_eq!(
820 buffer.deferred_ops_len(),
821 0,
822 "replica {}, buffer {} has deferred operations",
823 replica_id,
824 buffer.remote_id()
825 );
826 }
827 }
828 }
829 }
830
831 #[cfg(any(test, feature = "test-support"))]
832 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
833 let path = path.into();
834 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
835 self.opened_buffers.iter().any(|(_, buffer)| {
836 if let Some(buffer) = buffer.upgrade(cx) {
837 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
838 if file.worktree == worktree && file.path() == &path.path {
839 return true;
840 }
841 }
842 }
843 false
844 })
845 } else {
846 false
847 }
848 }
849
850 pub fn fs(&self) -> &Arc<dyn Fs> {
851 &self.fs
852 }
853
854 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
855 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
856 let mut online_tx = online_tx.borrow_mut();
857 if *online_tx != online {
858 *online_tx = online;
859 drop(online_tx);
860 self.metadata_changed(true, cx);
861 }
862 }
863 }
864
865 pub fn is_online(&self) -> bool {
866 match &self.client_state {
867 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
868 ProjectClientState::Remote { .. } => true,
869 }
870 }
871
872 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
873 self.unshared(cx);
874 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
875 if let Some(remote_id) = *remote_id_rx.borrow() {
876 let request = self.client.request(proto::UnregisterProject {
877 project_id: remote_id,
878 });
879 return cx.spawn(|this, mut cx| async move {
880 let response = request.await;
881
882 // Unregistering the project causes the server to send out a
883 // contact update removing this project from the host's list
884 // of online projects. Wait until this contact update has been
885 // processed before clearing out this project's remote id, so
886 // that there is no moment where this project appears in the
887 // contact metadata and *also* has no remote id.
888 this.update(&mut cx, |this, cx| {
889 this.user_store()
890 .update(cx, |store, _| store.contact_updates_done())
891 })
892 .await;
893
894 this.update(&mut cx, |this, cx| {
895 if let ProjectClientState::Local { remote_id_tx, .. } =
896 &mut this.client_state
897 {
898 *remote_id_tx.borrow_mut() = None;
899 }
900 this.client_subscriptions.clear();
901 this.metadata_changed(false, cx);
902 });
903 response.map(drop)
904 });
905 }
906 }
907 Task::ready(Ok(()))
908 }
909
910 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
911 if let ProjectClientState::Local {
912 remote_id_rx,
913 online_rx,
914 ..
915 } = &self.client_state
916 {
917 if remote_id_rx.borrow().is_some() {
918 return Task::ready(Ok(()));
919 }
920
921 let response = self.client.request(proto::RegisterProject {
922 online: *online_rx.borrow(),
923 });
924 cx.spawn(|this, mut cx| async move {
925 let remote_id = response.await?.project_id;
926 this.update(&mut cx, |this, cx| {
927 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
928 *remote_id_tx.borrow_mut() = Some(remote_id);
929 }
930
931 this.metadata_changed(false, cx);
932 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
933 this.client_subscriptions
934 .push(this.client.add_model_for_remote_entity(remote_id, cx));
935 Ok(())
936 })
937 })
938 } else {
939 Task::ready(Err(anyhow!("can't register a remote project")))
940 }
941 }
942
943 pub fn remote_id(&self) -> Option<u64> {
944 match &self.client_state {
945 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
946 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
947 }
948 }
949
950 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
951 let mut id = None;
952 let mut watch = None;
953 match &self.client_state {
954 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
955 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
956 }
957
958 async move {
959 if let Some(id) = id {
960 return id;
961 }
962 let mut watch = watch.unwrap();
963 loop {
964 let id = *watch.borrow();
965 if let Some(id) = id {
966 return id;
967 }
968 watch.next().await;
969 }
970 }
971 }
972
973 pub fn shared_remote_id(&self) -> Option<u64> {
974 match &self.client_state {
975 ProjectClientState::Local {
976 remote_id_rx,
977 is_shared,
978 ..
979 } => {
980 if *is_shared {
981 *remote_id_rx.borrow()
982 } else {
983 None
984 }
985 }
986 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
987 }
988 }
989
990 pub fn replica_id(&self) -> ReplicaId {
991 match &self.client_state {
992 ProjectClientState::Local { .. } => 0,
993 ProjectClientState::Remote { replica_id, .. } => *replica_id,
994 }
995 }
996
997 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
998 if let ProjectClientState::Local {
999 remote_id_rx,
1000 online_rx,
1001 ..
1002 } = &self.client_state
1003 {
1004 // Broadcast worktrees only if the project is public.
1005 let worktrees = if *online_rx.borrow() {
1006 self.worktrees
1007 .iter()
1008 .filter_map(|worktree| {
1009 worktree
1010 .upgrade(&cx)
1011 .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
1012 })
1013 .collect()
1014 } else {
1015 Default::default()
1016 };
1017 if let Some(project_id) = *remote_id_rx.borrow() {
1018 self.client
1019 .send(proto::UpdateProject {
1020 project_id,
1021 worktrees,
1022 online: *online_rx.borrow(),
1023 })
1024 .log_err();
1025 }
1026
1027 self.project_store.update(cx, |_, cx| cx.notify());
1028 if persist {
1029 self.persist_state(cx).detach_and_log_err(cx);
1030 }
1031 cx.notify();
1032 }
1033 }
1034
1035 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
1036 &self.collaborators
1037 }
1038
1039 pub fn worktrees<'a>(
1040 &'a self,
1041 cx: &'a AppContext,
1042 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1043 self.worktrees
1044 .iter()
1045 .filter_map(move |worktree| worktree.upgrade(cx))
1046 }
1047
1048 pub fn visible_worktrees<'a>(
1049 &'a self,
1050 cx: &'a AppContext,
1051 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1052 self.worktrees.iter().filter_map(|worktree| {
1053 worktree.upgrade(cx).and_then(|worktree| {
1054 if worktree.read(cx).is_visible() {
1055 Some(worktree)
1056 } else {
1057 None
1058 }
1059 })
1060 })
1061 }
1062
1063 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1064 self.visible_worktrees(cx)
1065 .map(|tree| tree.read(cx).root_name())
1066 }
1067
1068 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1069 self.worktrees
1070 .iter()
1071 .filter_map(|worktree| {
1072 let worktree = worktree.upgrade(&cx)?.read(cx);
1073 if worktree.is_visible() {
1074 Some(format!(
1075 "project-path-online:{}",
1076 worktree.as_local().unwrap().abs_path().to_string_lossy()
1077 ))
1078 } else {
1079 None
1080 }
1081 })
1082 .collect::<Vec<_>>()
1083 }
1084
1085 pub fn worktree_for_id(
1086 &self,
1087 id: WorktreeId,
1088 cx: &AppContext,
1089 ) -> Option<ModelHandle<Worktree>> {
1090 self.worktrees(cx)
1091 .find(|worktree| worktree.read(cx).id() == id)
1092 }
1093
1094 pub fn worktree_for_entry(
1095 &self,
1096 entry_id: ProjectEntryId,
1097 cx: &AppContext,
1098 ) -> Option<ModelHandle<Worktree>> {
1099 self.worktrees(cx)
1100 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1101 }
1102
1103 pub fn worktree_id_for_entry(
1104 &self,
1105 entry_id: ProjectEntryId,
1106 cx: &AppContext,
1107 ) -> Option<WorktreeId> {
1108 self.worktree_for_entry(entry_id, cx)
1109 .map(|worktree| worktree.read(cx).id())
1110 }
1111
1112 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1113 paths.iter().all(|path| self.contains_path(&path, cx))
1114 }
1115
1116 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1117 for worktree in self.worktrees(cx) {
1118 let worktree = worktree.read(cx).as_local();
1119 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1120 return true;
1121 }
1122 }
1123 false
1124 }
1125
1126 pub fn create_entry(
1127 &mut self,
1128 project_path: impl Into<ProjectPath>,
1129 is_directory: bool,
1130 cx: &mut ModelContext<Self>,
1131 ) -> Option<Task<Result<Entry>>> {
1132 let project_path = project_path.into();
1133 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1134 if self.is_local() {
1135 Some(worktree.update(cx, |worktree, cx| {
1136 worktree
1137 .as_local_mut()
1138 .unwrap()
1139 .create_entry(project_path.path, is_directory, cx)
1140 }))
1141 } else {
1142 let client = self.client.clone();
1143 let project_id = self.remote_id().unwrap();
1144 Some(cx.spawn_weak(|_, mut cx| async move {
1145 let response = client
1146 .request(proto::CreateProjectEntry {
1147 worktree_id: project_path.worktree_id.to_proto(),
1148 project_id,
1149 path: project_path.path.as_os_str().as_bytes().to_vec(),
1150 is_directory,
1151 })
1152 .await?;
1153 let entry = response
1154 .entry
1155 .ok_or_else(|| anyhow!("missing entry in response"))?;
1156 worktree
1157 .update(&mut cx, |worktree, cx| {
1158 worktree.as_remote().unwrap().insert_entry(
1159 entry,
1160 response.worktree_scan_id as usize,
1161 cx,
1162 )
1163 })
1164 .await
1165 }))
1166 }
1167 }
1168
1169 pub fn copy_entry(
1170 &mut self,
1171 entry_id: ProjectEntryId,
1172 new_path: impl Into<Arc<Path>>,
1173 cx: &mut ModelContext<Self>,
1174 ) -> Option<Task<Result<Entry>>> {
1175 let worktree = self.worktree_for_entry(entry_id, cx)?;
1176 let new_path = new_path.into();
1177 if self.is_local() {
1178 worktree.update(cx, |worktree, cx| {
1179 worktree
1180 .as_local_mut()
1181 .unwrap()
1182 .copy_entry(entry_id, new_path, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187
1188 Some(cx.spawn_weak(|_, mut cx| async move {
1189 let response = client
1190 .request(proto::CopyProjectEntry {
1191 project_id,
1192 entry_id: entry_id.to_proto(),
1193 new_path: new_path.as_os_str().as_bytes().to_vec(),
1194 })
1195 .await?;
1196 let entry = response
1197 .entry
1198 .ok_or_else(|| anyhow!("missing entry in response"))?;
1199 worktree
1200 .update(&mut cx, |worktree, cx| {
1201 worktree.as_remote().unwrap().insert_entry(
1202 entry,
1203 response.worktree_scan_id as usize,
1204 cx,
1205 )
1206 })
1207 .await
1208 }))
1209 }
1210 }
1211
1212 pub fn rename_entry(
1213 &mut self,
1214 entry_id: ProjectEntryId,
1215 new_path: impl Into<Arc<Path>>,
1216 cx: &mut ModelContext<Self>,
1217 ) -> Option<Task<Result<Entry>>> {
1218 let worktree = self.worktree_for_entry(entry_id, cx)?;
1219 let new_path = new_path.into();
1220 if self.is_local() {
1221 worktree.update(cx, |worktree, cx| {
1222 worktree
1223 .as_local_mut()
1224 .unwrap()
1225 .rename_entry(entry_id, new_path, cx)
1226 })
1227 } else {
1228 let client = self.client.clone();
1229 let project_id = self.remote_id().unwrap();
1230
1231 Some(cx.spawn_weak(|_, mut cx| async move {
1232 let response = client
1233 .request(proto::RenameProjectEntry {
1234 project_id,
1235 entry_id: entry_id.to_proto(),
1236 new_path: new_path.as_os_str().as_bytes().to_vec(),
1237 })
1238 .await?;
1239 let entry = response
1240 .entry
1241 .ok_or_else(|| anyhow!("missing entry in response"))?;
1242 worktree
1243 .update(&mut cx, |worktree, cx| {
1244 worktree.as_remote().unwrap().insert_entry(
1245 entry,
1246 response.worktree_scan_id as usize,
1247 cx,
1248 )
1249 })
1250 .await
1251 }))
1252 }
1253 }
1254
1255 pub fn delete_entry(
1256 &mut self,
1257 entry_id: ProjectEntryId,
1258 cx: &mut ModelContext<Self>,
1259 ) -> Option<Task<Result<()>>> {
1260 let worktree = self.worktree_for_entry(entry_id, cx)?;
1261 if self.is_local() {
1262 worktree.update(cx, |worktree, cx| {
1263 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1264 })
1265 } else {
1266 let client = self.client.clone();
1267 let project_id = self.remote_id().unwrap();
1268 Some(cx.spawn_weak(|_, mut cx| async move {
1269 let response = client
1270 .request(proto::DeleteProjectEntry {
1271 project_id,
1272 entry_id: entry_id.to_proto(),
1273 })
1274 .await?;
1275 worktree
1276 .update(&mut cx, move |worktree, cx| {
1277 worktree.as_remote().unwrap().delete_entry(
1278 entry_id,
1279 response.worktree_scan_id as usize,
1280 cx,
1281 )
1282 })
1283 .await
1284 }))
1285 }
1286 }
1287
1288 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1289 if !self.is_online() {
1290 return Task::ready(Err(anyhow!("can't share an offline project")));
1291 }
1292
1293 let project_id;
1294 if let ProjectClientState::Local {
1295 remote_id_rx,
1296 is_shared,
1297 ..
1298 } = &mut self.client_state
1299 {
1300 if *is_shared {
1301 return Task::ready(Ok(()));
1302 }
1303 *is_shared = true;
1304 if let Some(id) = *remote_id_rx.borrow() {
1305 project_id = id;
1306 } else {
1307 return Task::ready(Err(anyhow!("project hasn't been registered")));
1308 }
1309 } else {
1310 return Task::ready(Err(anyhow!("can't share a remote project")));
1311 };
1312
1313 for open_buffer in self.opened_buffers.values_mut() {
1314 match open_buffer {
1315 OpenBuffer::Strong(_) => {}
1316 OpenBuffer::Weak(buffer) => {
1317 if let Some(buffer) = buffer.upgrade(cx) {
1318 *open_buffer = OpenBuffer::Strong(buffer);
1319 }
1320 }
1321 OpenBuffer::Loading(_) => unreachable!(),
1322 }
1323 }
1324
1325 for worktree_handle in self.worktrees.iter_mut() {
1326 match worktree_handle {
1327 WorktreeHandle::Strong(_) => {}
1328 WorktreeHandle::Weak(worktree) => {
1329 if let Some(worktree) = worktree.upgrade(cx) {
1330 *worktree_handle = WorktreeHandle::Strong(worktree);
1331 }
1332 }
1333 }
1334 }
1335
1336 let mut tasks = Vec::new();
1337 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1338 worktree.update(cx, |worktree, cx| {
1339 let worktree = worktree.as_local_mut().unwrap();
1340 tasks.push(worktree.share(project_id, cx));
1341 });
1342 }
1343
1344 for (server_id, status) in &self.language_server_statuses {
1345 self.client
1346 .send(proto::StartLanguageServer {
1347 project_id,
1348 server: Some(proto::LanguageServer {
1349 id: *server_id as u64,
1350 name: status.name.clone(),
1351 }),
1352 })
1353 .log_err();
1354 }
1355
1356 cx.spawn(|this, mut cx| async move {
1357 for task in tasks {
1358 task.await?;
1359 }
1360 this.update(&mut cx, |_, cx| cx.notify());
1361 Ok(())
1362 })
1363 }
1364
1365 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1366 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1367 if !*is_shared {
1368 return;
1369 }
1370
1371 *is_shared = false;
1372 self.collaborators.clear();
1373 self.shared_buffers.clear();
1374 for worktree_handle in self.worktrees.iter_mut() {
1375 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1376 let is_visible = worktree.update(cx, |worktree, _| {
1377 worktree.as_local_mut().unwrap().unshare();
1378 worktree.is_visible()
1379 });
1380 if !is_visible {
1381 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1382 }
1383 }
1384 }
1385
1386 for open_buffer in self.opened_buffers.values_mut() {
1387 match open_buffer {
1388 OpenBuffer::Strong(buffer) => {
1389 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1390 }
1391 _ => {}
1392 }
1393 }
1394
1395 cx.notify();
1396 } else {
1397 log::error!("attempted to unshare a remote project");
1398 }
1399 }
1400
1401 pub fn respond_to_join_request(
1402 &mut self,
1403 requester_id: u64,
1404 allow: bool,
1405 cx: &mut ModelContext<Self>,
1406 ) {
1407 if let Some(project_id) = self.remote_id() {
1408 let share = if self.is_online() && allow {
1409 Some(self.share(cx))
1410 } else {
1411 None
1412 };
1413 let client = self.client.clone();
1414 cx.foreground()
1415 .spawn(async move {
1416 if let Some(share) = share {
1417 share.await?;
1418 }
1419 client.send(proto::RespondToJoinProjectRequest {
1420 requester_id,
1421 project_id,
1422 allow,
1423 })
1424 })
1425 .detach_and_log_err(cx);
1426 }
1427 }
1428
1429 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1430 if let ProjectClientState::Remote {
1431 sharing_has_stopped,
1432 ..
1433 } = &mut self.client_state
1434 {
1435 *sharing_has_stopped = true;
1436 self.collaborators.clear();
1437 for worktree in &self.worktrees {
1438 if let Some(worktree) = worktree.upgrade(cx) {
1439 worktree.update(cx, |worktree, _| {
1440 if let Some(worktree) = worktree.as_remote_mut() {
1441 worktree.disconnected_from_host();
1442 }
1443 });
1444 }
1445 }
1446 cx.notify();
1447 }
1448 }
1449
1450 pub fn is_read_only(&self) -> bool {
1451 match &self.client_state {
1452 ProjectClientState::Local { .. } => false,
1453 ProjectClientState::Remote {
1454 sharing_has_stopped,
1455 ..
1456 } => *sharing_has_stopped,
1457 }
1458 }
1459
1460 pub fn is_local(&self) -> bool {
1461 match &self.client_state {
1462 ProjectClientState::Local { .. } => true,
1463 ProjectClientState::Remote { .. } => false,
1464 }
1465 }
1466
1467 pub fn is_remote(&self) -> bool {
1468 !self.is_local()
1469 }
1470
1471 pub fn create_buffer(
1472 &mut self,
1473 text: &str,
1474 language: Option<Arc<Language>>,
1475 cx: &mut ModelContext<Self>,
1476 ) -> Result<ModelHandle<Buffer>> {
1477 if self.is_remote() {
1478 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1479 }
1480
1481 let buffer = cx.add_model(|cx| {
1482 Buffer::new(self.replica_id(), text, cx)
1483 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1484 });
1485 self.register_buffer(&buffer, cx)?;
1486 Ok(buffer)
1487 }
1488
1489 pub fn open_path(
1490 &mut self,
1491 path: impl Into<ProjectPath>,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1494 let task = self.open_buffer(path, cx);
1495 cx.spawn_weak(|_, cx| async move {
1496 let buffer = task.await?;
1497 let project_entry_id = buffer
1498 .read_with(&cx, |buffer, cx| {
1499 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1500 })
1501 .ok_or_else(|| anyhow!("no project entry"))?;
1502 Ok((project_entry_id, buffer.into()))
1503 })
1504 }
1505
1506 pub fn open_local_buffer(
1507 &mut self,
1508 abs_path: impl AsRef<Path>,
1509 cx: &mut ModelContext<Self>,
1510 ) -> Task<Result<ModelHandle<Buffer>>> {
1511 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1512 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1513 } else {
1514 Task::ready(Err(anyhow!("no such path")))
1515 }
1516 }
1517
1518 pub fn open_buffer(
1519 &mut self,
1520 path: impl Into<ProjectPath>,
1521 cx: &mut ModelContext<Self>,
1522 ) -> Task<Result<ModelHandle<Buffer>>> {
1523 let project_path = path.into();
1524 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1525 worktree
1526 } else {
1527 return Task::ready(Err(anyhow!("no such worktree")));
1528 };
1529
1530 // If there is already a buffer for the given path, then return it.
1531 let existing_buffer = self.get_open_buffer(&project_path, cx);
1532 if let Some(existing_buffer) = existing_buffer {
1533 return Task::ready(Ok(existing_buffer));
1534 }
1535
1536 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1537 // If the given path is already being loaded, then wait for that existing
1538 // task to complete and return the same buffer.
1539 hash_map::Entry::Occupied(e) => e.get().clone(),
1540
1541 // Otherwise, record the fact that this path is now being loaded.
1542 hash_map::Entry::Vacant(entry) => {
1543 let (mut tx, rx) = postage::watch::channel();
1544 entry.insert(rx.clone());
1545
1546 let load_buffer = if worktree.read(cx).is_local() {
1547 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1548 } else {
1549 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1550 };
1551
1552 cx.spawn(move |this, mut cx| async move {
1553 let load_result = load_buffer.await;
1554 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1555 // Record the fact that the buffer is no longer loading.
1556 this.loading_buffers.remove(&project_path);
1557 let buffer = load_result.map_err(Arc::new)?;
1558 Ok(buffer)
1559 }));
1560 })
1561 .detach();
1562 rx
1563 }
1564 };
1565
1566 cx.foreground().spawn(async move {
1567 loop {
1568 if let Some(result) = loading_watch.borrow().as_ref() {
1569 match result {
1570 Ok(buffer) => return Ok(buffer.clone()),
1571 Err(error) => return Err(anyhow!("{}", error)),
1572 }
1573 }
1574 loading_watch.next().await;
1575 }
1576 })
1577 }
1578
1579 fn open_local_buffer_internal(
1580 &mut self,
1581 path: &Arc<Path>,
1582 worktree: &ModelHandle<Worktree>,
1583 cx: &mut ModelContext<Self>,
1584 ) -> Task<Result<ModelHandle<Buffer>>> {
1585 let load_buffer = worktree.update(cx, |worktree, cx| {
1586 let worktree = worktree.as_local_mut().unwrap();
1587 worktree.load_buffer(path, cx)
1588 });
1589 cx.spawn(|this, mut cx| async move {
1590 let buffer = load_buffer.await?;
1591 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1592 Ok(buffer)
1593 })
1594 }
1595
1596 fn open_remote_buffer_internal(
1597 &mut self,
1598 path: &Arc<Path>,
1599 worktree: &ModelHandle<Worktree>,
1600 cx: &mut ModelContext<Self>,
1601 ) -> Task<Result<ModelHandle<Buffer>>> {
1602 let rpc = self.client.clone();
1603 let project_id = self.remote_id().unwrap();
1604 let remote_worktree_id = worktree.read(cx).id();
1605 let path = path.clone();
1606 let path_string = path.to_string_lossy().to_string();
1607 cx.spawn(|this, mut cx| async move {
1608 let response = rpc
1609 .request(proto::OpenBufferByPath {
1610 project_id,
1611 worktree_id: remote_worktree_id.to_proto(),
1612 path: path_string,
1613 })
1614 .await?;
1615 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1616 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1617 .await
1618 })
1619 }
1620
1621 fn open_local_buffer_via_lsp(
1622 &mut self,
1623 abs_path: lsp::Url,
1624 lsp_adapter: Arc<dyn LspAdapter>,
1625 lsp_server: Arc<LanguageServer>,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Task<Result<ModelHandle<Buffer>>> {
1628 cx.spawn(|this, mut cx| async move {
1629 let abs_path = abs_path
1630 .to_file_path()
1631 .map_err(|_| anyhow!("can't convert URI to path"))?;
1632 let (worktree, relative_path) = if let Some(result) =
1633 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1634 {
1635 result
1636 } else {
1637 let worktree = this
1638 .update(&mut cx, |this, cx| {
1639 this.create_local_worktree(&abs_path, false, cx)
1640 })
1641 .await?;
1642 this.update(&mut cx, |this, cx| {
1643 this.language_servers.insert(
1644 (worktree.read(cx).id(), lsp_adapter.name()),
1645 (lsp_adapter, lsp_server),
1646 );
1647 });
1648 (worktree, PathBuf::new())
1649 };
1650
1651 let project_path = ProjectPath {
1652 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1653 path: relative_path.into(),
1654 };
1655 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1656 .await
1657 })
1658 }
1659
1660 pub fn open_buffer_by_id(
1661 &mut self,
1662 id: u64,
1663 cx: &mut ModelContext<Self>,
1664 ) -> Task<Result<ModelHandle<Buffer>>> {
1665 if let Some(buffer) = self.buffer_for_id(id, cx) {
1666 Task::ready(Ok(buffer))
1667 } else if self.is_local() {
1668 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1669 } else if let Some(project_id) = self.remote_id() {
1670 let request = self
1671 .client
1672 .request(proto::OpenBufferById { project_id, id });
1673 cx.spawn(|this, mut cx| async move {
1674 let buffer = request
1675 .await?
1676 .buffer
1677 .ok_or_else(|| anyhow!("invalid buffer"))?;
1678 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1679 .await
1680 })
1681 } else {
1682 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1683 }
1684 }
1685
1686 pub fn save_buffer_as(
1687 &mut self,
1688 buffer: ModelHandle<Buffer>,
1689 abs_path: PathBuf,
1690 cx: &mut ModelContext<Project>,
1691 ) -> Task<Result<()>> {
1692 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1693 let old_path =
1694 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1695 cx.spawn(|this, mut cx| async move {
1696 if let Some(old_path) = old_path {
1697 this.update(&mut cx, |this, cx| {
1698 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1699 });
1700 }
1701 let (worktree, path) = worktree_task.await?;
1702 worktree
1703 .update(&mut cx, |worktree, cx| {
1704 worktree
1705 .as_local_mut()
1706 .unwrap()
1707 .save_buffer_as(buffer.clone(), path, cx)
1708 })
1709 .await?;
1710 this.update(&mut cx, |this, cx| {
1711 this.assign_language_to_buffer(&buffer, cx);
1712 this.register_buffer_with_language_server(&buffer, cx);
1713 });
1714 Ok(())
1715 })
1716 }
1717
1718 pub fn get_open_buffer(
1719 &mut self,
1720 path: &ProjectPath,
1721 cx: &mut ModelContext<Self>,
1722 ) -> Option<ModelHandle<Buffer>> {
1723 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1724 self.opened_buffers.values().find_map(|buffer| {
1725 let buffer = buffer.upgrade(cx)?;
1726 let file = File::from_dyn(buffer.read(cx).file())?;
1727 if file.worktree == worktree && file.path() == &path.path {
1728 Some(buffer)
1729 } else {
1730 None
1731 }
1732 })
1733 }
1734
1735 fn register_buffer(
1736 &mut self,
1737 buffer: &ModelHandle<Buffer>,
1738 cx: &mut ModelContext<Self>,
1739 ) -> Result<()> {
1740 let remote_id = buffer.read(cx).remote_id();
1741 let open_buffer = if self.is_remote() || self.is_shared() {
1742 OpenBuffer::Strong(buffer.clone())
1743 } else {
1744 OpenBuffer::Weak(buffer.downgrade())
1745 };
1746
1747 match self.opened_buffers.insert(remote_id, open_buffer) {
1748 None => {}
1749 Some(OpenBuffer::Loading(operations)) => {
1750 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1751 }
1752 Some(OpenBuffer::Weak(existing_handle)) => {
1753 if existing_handle.upgrade(cx).is_some() {
1754 Err(anyhow!(
1755 "already registered buffer with remote id {}",
1756 remote_id
1757 ))?
1758 }
1759 }
1760 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1761 "already registered buffer with remote id {}",
1762 remote_id
1763 ))?,
1764 }
1765 cx.subscribe(buffer, |this, buffer, event, cx| {
1766 this.on_buffer_event(buffer, event, cx);
1767 })
1768 .detach();
1769
1770 self.assign_language_to_buffer(buffer, cx);
1771 self.register_buffer_with_language_server(buffer, cx);
1772 cx.observe_release(buffer, |this, buffer, cx| {
1773 if let Some(file) = File::from_dyn(buffer.file()) {
1774 if file.is_local() {
1775 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1776 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1777 server
1778 .notify::<lsp::notification::DidCloseTextDocument>(
1779 lsp::DidCloseTextDocumentParams {
1780 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1781 },
1782 )
1783 .log_err();
1784 }
1785 }
1786 }
1787 })
1788 .detach();
1789
1790 Ok(())
1791 }
1792
1793 fn register_buffer_with_language_server(
1794 &mut self,
1795 buffer_handle: &ModelHandle<Buffer>,
1796 cx: &mut ModelContext<Self>,
1797 ) {
1798 let buffer = buffer_handle.read(cx);
1799 let buffer_id = buffer.remote_id();
1800 if let Some(file) = File::from_dyn(buffer.file()) {
1801 if file.is_local() {
1802 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1803 let initial_snapshot = buffer.text_snapshot();
1804
1805 let mut language_server = None;
1806 let mut language_id = None;
1807 if let Some(language) = buffer.language() {
1808 let worktree_id = file.worktree_id(cx);
1809 if let Some(adapter) = language.lsp_adapter() {
1810 language_id = adapter.id_for_language(language.name().as_ref());
1811 language_server = self
1812 .language_servers
1813 .get(&(worktree_id, adapter.name()))
1814 .cloned();
1815 }
1816 }
1817
1818 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1819 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1820 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1821 .log_err();
1822 }
1823 }
1824
1825 if let Some((_, server)) = language_server {
1826 server
1827 .notify::<lsp::notification::DidOpenTextDocument>(
1828 lsp::DidOpenTextDocumentParams {
1829 text_document: lsp::TextDocumentItem::new(
1830 uri,
1831 language_id.unwrap_or_default(),
1832 0,
1833 initial_snapshot.text(),
1834 ),
1835 }
1836 .clone(),
1837 )
1838 .log_err();
1839 buffer_handle.update(cx, |buffer, cx| {
1840 buffer.set_completion_triggers(
1841 server
1842 .capabilities()
1843 .completion_provider
1844 .as_ref()
1845 .and_then(|provider| provider.trigger_characters.clone())
1846 .unwrap_or(Vec::new()),
1847 cx,
1848 )
1849 });
1850 self.buffer_snapshots
1851 .insert(buffer_id, vec![(0, initial_snapshot)]);
1852 }
1853 }
1854 }
1855 }
1856
1857 fn unregister_buffer_from_language_server(
1858 &mut self,
1859 buffer: &ModelHandle<Buffer>,
1860 old_path: PathBuf,
1861 cx: &mut ModelContext<Self>,
1862 ) {
1863 buffer.update(cx, |buffer, cx| {
1864 buffer.update_diagnostics(Default::default(), cx);
1865 self.buffer_snapshots.remove(&buffer.remote_id());
1866 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1867 language_server
1868 .notify::<lsp::notification::DidCloseTextDocument>(
1869 lsp::DidCloseTextDocumentParams {
1870 text_document: lsp::TextDocumentIdentifier::new(
1871 lsp::Url::from_file_path(old_path).unwrap(),
1872 ),
1873 },
1874 )
1875 .log_err();
1876 }
1877 });
1878 }
1879
1880 fn on_buffer_event(
1881 &mut self,
1882 buffer: ModelHandle<Buffer>,
1883 event: &BufferEvent,
1884 cx: &mut ModelContext<Self>,
1885 ) -> Option<()> {
1886 match event {
1887 BufferEvent::Operation(operation) => {
1888 if let Some(project_id) = self.shared_remote_id() {
1889 let request = self.client.request(proto::UpdateBuffer {
1890 project_id,
1891 buffer_id: buffer.read(cx).remote_id(),
1892 operations: vec![language::proto::serialize_operation(&operation)],
1893 });
1894 cx.background().spawn(request).detach_and_log_err(cx);
1895 } else if let Some(project_id) = self.remote_id() {
1896 let _ = self
1897 .client
1898 .send(proto::RegisterProjectActivity { project_id });
1899 }
1900 }
1901 BufferEvent::Edited { .. } => {
1902 let (_, language_server) = self
1903 .language_server_for_buffer(buffer.read(cx), cx)?
1904 .clone();
1905 let buffer = buffer.read(cx);
1906 let file = File::from_dyn(buffer.file())?;
1907 let abs_path = file.as_local()?.abs_path(cx);
1908 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1909 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1910 let (version, prev_snapshot) = buffer_snapshots.last()?;
1911 let next_snapshot = buffer.text_snapshot();
1912 let next_version = version + 1;
1913
1914 let content_changes = buffer
1915 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1916 .map(|edit| {
1917 let edit_start = edit.new.start.0;
1918 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1919 let new_text = next_snapshot
1920 .text_for_range(edit.new.start.1..edit.new.end.1)
1921 .collect();
1922 lsp::TextDocumentContentChangeEvent {
1923 range: Some(lsp::Range::new(
1924 point_to_lsp(edit_start),
1925 point_to_lsp(edit_end),
1926 )),
1927 range_length: None,
1928 text: new_text,
1929 }
1930 })
1931 .collect();
1932
1933 buffer_snapshots.push((next_version, next_snapshot));
1934
1935 language_server
1936 .notify::<lsp::notification::DidChangeTextDocument>(
1937 lsp::DidChangeTextDocumentParams {
1938 text_document: lsp::VersionedTextDocumentIdentifier::new(
1939 uri,
1940 next_version,
1941 ),
1942 content_changes,
1943 },
1944 )
1945 .log_err();
1946 }
1947 BufferEvent::Saved => {
1948 let file = File::from_dyn(buffer.read(cx).file())?;
1949 let worktree_id = file.worktree_id(cx);
1950 let abs_path = file.as_local()?.abs_path(cx);
1951 let text_document = lsp::TextDocumentIdentifier {
1952 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1953 };
1954
1955 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1956 server
1957 .notify::<lsp::notification::DidSaveTextDocument>(
1958 lsp::DidSaveTextDocumentParams {
1959 text_document: text_document.clone(),
1960 text: None,
1961 },
1962 )
1963 .log_err();
1964 }
1965
1966 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1967 // that don't support a disk-based progress token.
1968 let (lsp_adapter, language_server) =
1969 self.language_server_for_buffer(buffer.read(cx), cx)?;
1970 if lsp_adapter
1971 .disk_based_diagnostics_progress_token()
1972 .is_none()
1973 {
1974 let server_id = language_server.server_id();
1975 self.disk_based_diagnostics_finished(server_id, cx);
1976 self.broadcast_language_server_update(
1977 server_id,
1978 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1979 proto::LspDiskBasedDiagnosticsUpdated {},
1980 ),
1981 );
1982 }
1983 }
1984 _ => {}
1985 }
1986
1987 None
1988 }
1989
1990 fn language_servers_for_worktree(
1991 &self,
1992 worktree_id: WorktreeId,
1993 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1994 self.language_servers.iter().filter_map(
1995 move |((language_server_worktree_id, _), server)| {
1996 if *language_server_worktree_id == worktree_id {
1997 Some(server)
1998 } else {
1999 None
2000 }
2001 },
2002 )
2003 }
2004
2005 fn assign_language_to_buffer(
2006 &mut self,
2007 buffer: &ModelHandle<Buffer>,
2008 cx: &mut ModelContext<Self>,
2009 ) -> Option<()> {
2010 // If the buffer has a language, set it and start the language server if we haven't already.
2011 let full_path = buffer.read(cx).file()?.full_path(cx);
2012 let language = self.languages.select_language(&full_path)?;
2013 buffer.update(cx, |buffer, cx| {
2014 buffer.set_language(Some(language.clone()), cx);
2015 });
2016
2017 let file = File::from_dyn(buffer.read(cx).file())?;
2018 let worktree = file.worktree.read(cx).as_local()?;
2019 let worktree_id = worktree.id();
2020 let worktree_abs_path = worktree.abs_path().clone();
2021 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
2022
2023 None
2024 }
2025
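    // Starts the language server for `language` in this worktree (at most once per
    // worktree/adapter pair), wires up its notification and request handlers, records its
    // status, and then informs it about every matching buffer that is already open.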
2026 fn start_language_server(
2027 &mut self,
2028 worktree_id: WorktreeId,
2029 worktree_path: Arc<Path>,
2030 language: Arc<Language>,
2031 cx: &mut ModelContext<Self>,
2032 ) {
2033 if !cx
2034 .global::<Settings>()
2035 .enable_language_server(Some(&language.name()))
2036 {
2037 return;
2038 }
2039
2040 let adapter = if let Some(adapter) = language.lsp_adapter() {
2041 adapter
2042 } else {
2043 return;
2044 };
2045 let key = (worktree_id, adapter.name());
2046 self.started_language_servers
2047 .entry(key.clone())
2048 .or_insert_with(|| {
2049 let server_id = post_inc(&mut self.next_language_server_id);
2050 let language_server = self.languages.start_language_server(
2051 server_id,
2052 language.clone(),
2053 worktree_path,
2054 self.client.http_client(),
2055 cx,
2056 );
2057 cx.spawn_weak(|this, mut cx| async move {
2058 let language_server = language_server?.await.log_err()?;
2059 let language_server = language_server
2060 .initialize(adapter.initialization_options())
2061 .await
2062 .log_err()?;
2063 let this = this.upgrade(&cx)?;
2064 let disk_based_diagnostics_progress_token =
2065 adapter.disk_based_diagnostics_progress_token();
2066
2067 language_server
2068 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2069 let this = this.downgrade();
2070 let adapter = adapter.clone();
2071 move |params, mut cx| {
2072 if let Some(this) = this.upgrade(&cx) {
2073 this.update(&mut cx, |this, cx| {
2074 this.on_lsp_diagnostics_published(
2075 server_id, params, &adapter, cx,
2076 );
2077 });
2078 }
2079 }
2080 })
2081 .detach();
2082
2083 language_server
2084 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2085 let settings = this
2086 .read_with(&cx, |this, _| this.language_server_settings.clone());
2087 move |params, _| {
2088 let settings = settings.lock().clone();
2089 async move {
2090 Ok(params
2091 .items
2092 .into_iter()
2093 .map(|item| {
2094 if let Some(section) = &item.section {
2095 settings
2096 .get(section)
2097 .cloned()
2098 .unwrap_or(serde_json::Value::Null)
2099 } else {
2100 settings.clone()
2101 }
2102 })
2103 .collect())
2104 }
2105 }
2106 })
2107 .detach();
2108
                    // Even though we don't act on these requests, respond to them so that
                    // language servers like `gopls`, which wait for a response to them while
                    // initializing, don't stall.
2112 language_server
2113 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2114 let this = this.downgrade();
2115 move |params, mut cx| async move {
2116 if let Some(this) = this.upgrade(&cx) {
2117 this.update(&mut cx, |this, _| {
2118 if let Some(status) =
2119 this.language_server_statuses.get_mut(&server_id)
2120 {
2121 if let lsp::NumberOrString::String(token) = params.token
2122 {
2123 status.progress_tokens.insert(token);
2124 }
2125 }
2126 });
2127 }
2128 Ok(())
2129 }
2130 })
2131 .detach();
2132 language_server
2133 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2134 Ok(())
2135 })
2136 .detach();
2137
2138 language_server
2139 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2140 let this = this.downgrade();
2141 let adapter = adapter.clone();
2142 let language_server = language_server.clone();
2143 move |params, cx| {
2144 Self::on_lsp_workspace_edit(
2145 this,
2146 params,
2147 server_id,
2148 adapter.clone(),
2149 language_server.clone(),
2150 cx,
2151 )
2152 }
2153 })
2154 .detach();
2155
2156 language_server
2157 .on_notification::<lsp::notification::Progress, _>({
2158 let this = this.downgrade();
2159 move |params, mut cx| {
2160 if let Some(this) = this.upgrade(&cx) {
2161 this.update(&mut cx, |this, cx| {
2162 this.on_lsp_progress(
2163 params,
2164 server_id,
2165 disk_based_diagnostics_progress_token,
2166 cx,
2167 );
2168 });
2169 }
2170 }
2171 })
2172 .detach();
2173
2174 this.update(&mut cx, |this, cx| {
2175 this.language_servers
2176 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2177 this.language_server_statuses.insert(
2178 server_id,
2179 LanguageServerStatus {
2180 name: language_server.name().to_string(),
2181 pending_work: Default::default(),
2182 has_pending_diagnostic_updates: false,
2183 progress_tokens: Default::default(),
2184 },
2185 );
2186 language_server
2187 .notify::<lsp::notification::DidChangeConfiguration>(
2188 lsp::DidChangeConfigurationParams {
2189 settings: this.language_server_settings.lock().clone(),
2190 },
2191 )
2192 .ok();
2193
2194 if let Some(project_id) = this.shared_remote_id() {
2195 this.client
2196 .send(proto::StartLanguageServer {
2197 project_id,
2198 server: Some(proto::LanguageServer {
2199 id: server_id as u64,
2200 name: language_server.name().to_string(),
2201 }),
2202 })
2203 .log_err();
2204 }
2205
2206 // Tell the language server about every open buffer in the worktree that matches the language.
2207 for buffer in this.opened_buffers.values() {
2208 if let Some(buffer_handle) = buffer.upgrade(cx) {
2209 let buffer = buffer_handle.read(cx);
2210 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2211 file
2212 } else {
2213 continue;
2214 };
2215 let language = if let Some(language) = buffer.language() {
2216 language
2217 } else {
2218 continue;
2219 };
2220 if file.worktree.read(cx).id() != key.0
2221 || language.lsp_adapter().map(|a| a.name())
2222 != Some(key.1.clone())
2223 {
2224 continue;
2225 }
2226
2227 let file = file.as_local()?;
2228 let versions = this
2229 .buffer_snapshots
2230 .entry(buffer.remote_id())
2231 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2232 let (version, initial_snapshot) = versions.last().unwrap();
2233 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2234 let language_id = adapter.id_for_language(language.name().as_ref());
2235 language_server
2236 .notify::<lsp::notification::DidOpenTextDocument>(
2237 lsp::DidOpenTextDocumentParams {
2238 text_document: lsp::TextDocumentItem::new(
2239 uri,
2240 language_id.unwrap_or_default(),
2241 *version,
2242 initial_snapshot.text(),
2243 ),
2244 },
2245 )
2246 .log_err()?;
2247 buffer_handle.update(cx, |buffer, cx| {
2248 buffer.set_completion_triggers(
2249 language_server
2250 .capabilities()
2251 .completion_provider
2252 .as_ref()
2253 .and_then(|provider| {
2254 provider.trigger_characters.clone()
2255 })
                                        .unwrap_or_default(),
2257 cx,
2258 )
2259 });
2260 }
2261 }
2262
2263 cx.notify();
2264 Some(())
2265 });
2266
2267 Some(language_server)
2268 })
2269 });
2270 }
2271
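    // Removes the server registered for this worktree/adapter pair, shuts it down in the
    // background, and clears its status entry once shutdown completes.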
2272 fn stop_language_server(
2273 &mut self,
2274 worktree_id: WorktreeId,
2275 adapter_name: LanguageServerName,
2276 cx: &mut ModelContext<Self>,
2277 ) -> Task<()> {
2278 let key = (worktree_id, adapter_name);
2279 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2280 self.language_server_statuses
2281 .remove(&language_server.server_id());
2282 cx.notify();
2283 }
2284
2285 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2286 cx.spawn_weak(|this, mut cx| async move {
2287 if let Some(language_server) = started_language_server.await {
2288 if let Some(shutdown) = language_server.shutdown() {
2289 shutdown.await;
2290 }
2291
2292 if let Some(this) = this.upgrade(&cx) {
2293 this.update(&mut cx, |this, cx| {
2294 this.language_server_statuses
2295 .remove(&language_server.server_id());
2296 cx.notify();
2297 });
2298 }
2299 }
2300 })
2301 } else {
2302 Task::ready(())
2303 }
2304 }
2305
2306 pub fn restart_language_servers_for_buffers(
2307 &mut self,
2308 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2309 cx: &mut ModelContext<Self>,
2310 ) -> Option<()> {
2311 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2312 .into_iter()
2313 .filter_map(|buffer| {
2314 let file = File::from_dyn(buffer.read(cx).file())?;
2315 let worktree = file.worktree.read(cx).as_local()?;
2316 let worktree_id = worktree.id();
2317 let worktree_abs_path = worktree.abs_path().clone();
2318 let full_path = file.full_path(cx);
2319 Some((worktree_id, worktree_abs_path, full_path))
2320 })
2321 .collect();
2322 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2323 let language = self.languages.select_language(&full_path)?;
2324 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2325 }
2326
2327 None
2328 }
2329
2330 fn restart_language_server(
2331 &mut self,
2332 worktree_id: WorktreeId,
2333 worktree_path: Arc<Path>,
2334 language: Arc<Language>,
2335 cx: &mut ModelContext<Self>,
2336 ) {
2337 let adapter = if let Some(adapter) = language.lsp_adapter() {
2338 adapter
2339 } else {
2340 return;
2341 };
2342
2343 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2344 cx.spawn_weak(|this, mut cx| async move {
2345 stop.await;
2346 if let Some(this) = this.upgrade(&cx) {
2347 this.update(&mut cx, |this, cx| {
2348 this.start_language_server(worktree_id, worktree_path, language, cx);
2349 });
2350 }
2351 })
2352 .detach();
2353 }
2354
2355 fn on_lsp_diagnostics_published(
2356 &mut self,
2357 server_id: usize,
2358 mut params: lsp::PublishDiagnosticsParams,
2359 adapter: &Arc<dyn LspAdapter>,
2360 cx: &mut ModelContext<Self>,
2361 ) {
2362 adapter.process_diagnostics(&mut params);
2363 self.update_diagnostics(
2364 server_id,
2365 params,
2366 adapter.disk_based_diagnostic_sources(),
2367 cx,
2368 )
2369 .log_err();
2370 }
2371
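    // Dispatches `$/progress` notifications from a language server. Numeric tokens and tokens
    // the server never registered are ignored; the adapter's disk-based diagnostics token
    // toggles the pending-diagnostics state, and all other tokens are surfaced as work
    // start/progress/end and rebroadcast to collaborators.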
2372 fn on_lsp_progress(
2373 &mut self,
2374 progress: lsp::ProgressParams,
2375 server_id: usize,
2376 disk_based_diagnostics_progress_token: Option<&str>,
2377 cx: &mut ModelContext<Self>,
2378 ) {
2379 let token = match progress.token {
2380 lsp::NumberOrString::String(token) => token,
2381 lsp::NumberOrString::Number(token) => {
2382 log::info!("skipping numeric progress token {}", token);
2383 return;
2384 }
2385 };
2386 let progress = match progress.value {
2387 lsp::ProgressParamsValue::WorkDone(value) => value,
2388 };
2389 let language_server_status =
2390 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2391 status
2392 } else {
2393 return;
2394 };
2395
2396 if !language_server_status.progress_tokens.contains(&token) {
2397 return;
2398 }
2399
2400 match progress {
2401 lsp::WorkDoneProgress::Begin(report) => {
2402 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2403 language_server_status.has_pending_diagnostic_updates = true;
2404 self.disk_based_diagnostics_started(server_id, cx);
2405 self.broadcast_language_server_update(
2406 server_id,
2407 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2408 proto::LspDiskBasedDiagnosticsUpdating {},
2409 ),
2410 );
2411 } else {
2412 self.on_lsp_work_start(
2413 server_id,
2414 token.clone(),
2415 LanguageServerProgress {
2416 message: report.message.clone(),
2417 percentage: report.percentage.map(|p| p as usize),
2418 last_update_at: Instant::now(),
2419 },
2420 cx,
2421 );
2422 self.broadcast_language_server_update(
2423 server_id,
2424 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2425 token,
2426 message: report.message,
2427 percentage: report.percentage.map(|p| p as u32),
2428 }),
2429 );
2430 }
2431 }
2432 lsp::WorkDoneProgress::Report(report) => {
2433 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2434 self.on_lsp_work_progress(
2435 server_id,
2436 token.clone(),
2437 LanguageServerProgress {
2438 message: report.message.clone(),
2439 percentage: report.percentage.map(|p| p as usize),
2440 last_update_at: Instant::now(),
2441 },
2442 cx,
2443 );
2444 self.broadcast_language_server_update(
2445 server_id,
2446 proto::update_language_server::Variant::WorkProgress(
2447 proto::LspWorkProgress {
2448 token,
2449 message: report.message,
2450 percentage: report.percentage.map(|p| p as u32),
2451 },
2452 ),
2453 );
2454 }
2455 }
2456 lsp::WorkDoneProgress::End(_) => {
2457 language_server_status.progress_tokens.remove(&token);
2458
2459 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2460 language_server_status.has_pending_diagnostic_updates = false;
2461 self.disk_based_diagnostics_finished(server_id, cx);
2462 self.broadcast_language_server_update(
2463 server_id,
2464 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2465 proto::LspDiskBasedDiagnosticsUpdated {},
2466 ),
2467 );
2468 } else {
2469 self.on_lsp_work_end(server_id, token.clone(), cx);
2470 self.broadcast_language_server_update(
2471 server_id,
2472 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2473 token,
2474 }),
2475 );
2476 }
2477 }
2478 }
2479 }
2480
2481 fn on_lsp_work_start(
2482 &mut self,
2483 language_server_id: usize,
2484 token: String,
2485 progress: LanguageServerProgress,
2486 cx: &mut ModelContext<Self>,
2487 ) {
2488 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2489 status.pending_work.insert(token, progress);
2490 cx.notify();
2491 }
2492 }
2493
2494 fn on_lsp_work_progress(
2495 &mut self,
2496 language_server_id: usize,
2497 token: String,
2498 progress: LanguageServerProgress,
2499 cx: &mut ModelContext<Self>,
2500 ) {
2501 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2502 let entry = status
2503 .pending_work
2504 .entry(token)
2505 .or_insert(LanguageServerProgress {
2506 message: Default::default(),
2507 percentage: Default::default(),
2508 last_update_at: progress.last_update_at,
2509 });
2510 if progress.message.is_some() {
2511 entry.message = progress.message;
2512 }
2513 if progress.percentage.is_some() {
2514 entry.percentage = progress.percentage;
2515 }
2516 entry.last_update_at = progress.last_update_at;
2517 cx.notify();
2518 }
2519 }
2520
2521 fn on_lsp_work_end(
2522 &mut self,
2523 language_server_id: usize,
2524 token: String,
2525 cx: &mut ModelContext<Self>,
2526 ) {
2527 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2528 status.pending_work.remove(&token);
2529 cx.notify();
2530 }
2531 }
2532
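    // Handles a server-initiated `workspace/applyEdit` request: the edit is applied locally and
    // the resulting transaction is stashed per server so the LSP request that triggered it can
    // pick it up.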
2533 async fn on_lsp_workspace_edit(
2534 this: WeakModelHandle<Self>,
2535 params: lsp::ApplyWorkspaceEditParams,
2536 server_id: usize,
2537 adapter: Arc<dyn LspAdapter>,
2538 language_server: Arc<LanguageServer>,
2539 mut cx: AsyncAppContext,
2540 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2541 let this = this
2542 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2544 let transaction = Self::deserialize_workspace_edit(
2545 this.clone(),
2546 params.edit,
2547 true,
2548 adapter.clone(),
2549 language_server.clone(),
2550 &mut cx,
2551 )
2552 .await
2553 .log_err();
2554 this.update(&mut cx, |this, _| {
2555 if let Some(transaction) = transaction {
2556 this.last_workspace_edits_by_language_server
2557 .insert(server_id, transaction);
2558 }
2559 });
2560 Ok(lsp::ApplyWorkspaceEditResponse {
2561 applied: true,
2562 failed_change: None,
2563 failure_reason: None,
2564 })
2565 }
2566
2567 fn broadcast_language_server_update(
2568 &self,
2569 language_server_id: usize,
2570 event: proto::update_language_server::Variant,
2571 ) {
2572 if let Some(project_id) = self.shared_remote_id() {
2573 self.client
2574 .send(proto::UpdateLanguageServer {
2575 project_id,
2576 language_server_id: language_server_id as u64,
2577 variant: Some(event),
2578 })
2579 .log_err();
2580 }
2581 }
2582
2583 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2584 for (_, server) in self.language_servers.values() {
2585 server
2586 .notify::<lsp::notification::DidChangeConfiguration>(
2587 lsp::DidChangeConfigurationParams {
2588 settings: settings.clone(),
2589 },
2590 )
2591 .ok();
2592 }
2593 *self.language_server_settings.lock() = settings;
2594 }
2595
2596 pub fn language_server_statuses(
2597 &self,
2598 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2599 self.language_server_statuses.values()
2600 }
2601
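    // Converts an LSP `publishDiagnostics` payload into diagnostic entries: each diagnostic
    // becomes a primary entry in a fresh group, its same-file related locations become
    // secondary entries, and diagnostics that point back at an existing entry only refine the
    // severity and "unnecessary" flags of the matching secondary entries.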
2602 pub fn update_diagnostics(
2603 &mut self,
2604 language_server_id: usize,
2605 params: lsp::PublishDiagnosticsParams,
2606 disk_based_sources: &[&str],
2607 cx: &mut ModelContext<Self>,
2608 ) -> Result<()> {
2609 let abs_path = params
2610 .uri
2611 .to_file_path()
2612 .map_err(|_| anyhow!("URI is not a file"))?;
2613 let mut diagnostics = Vec::default();
2614 let mut primary_diagnostic_group_ids = HashMap::default();
2615 let mut sources_by_group_id = HashMap::default();
2616 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2618 let source = diagnostic.source.as_ref();
2619 let code = diagnostic.code.as_ref().map(|code| match code {
2620 lsp::NumberOrString::Number(code) => code.to_string(),
2621 lsp::NumberOrString::String(code) => code.clone(),
2622 });
2623 let range = range_from_lsp(diagnostic.range);
2624 let is_supporting = diagnostic
2625 .related_information
2626 .as_ref()
2627 .map_or(false, |infos| {
2628 infos.iter().any(|info| {
2629 primary_diagnostic_group_ids.contains_key(&(
2630 source,
2631 code.clone(),
2632 range_from_lsp(info.location.range),
2633 ))
2634 })
2635 });
2636
2637 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2638 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2639 });
2640
2641 if is_supporting {
2642 supporting_diagnostics.insert(
2643 (source, code.clone(), range),
2644 (diagnostic.severity, is_unnecessary),
2645 );
2646 } else {
2647 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2648 let is_disk_based = source.map_or(false, |source| {
2649 disk_based_sources.contains(&source.as_str())
2650 });
2651
2652 sources_by_group_id.insert(group_id, source);
2653 primary_diagnostic_group_ids
2654 .insert((source, code.clone(), range.clone()), group_id);
2655
2656 diagnostics.push(DiagnosticEntry {
2657 range,
2658 diagnostic: Diagnostic {
2659 code: code.clone(),
2660 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2661 message: diagnostic.message.clone(),
2662 group_id,
2663 is_primary: true,
2664 is_valid: true,
2665 is_disk_based,
2666 is_unnecessary,
2667 },
2668 });
2669 if let Some(infos) = &diagnostic.related_information {
2670 for info in infos {
2671 if info.location.uri == params.uri && !info.message.is_empty() {
2672 let range = range_from_lsp(info.location.range);
2673 diagnostics.push(DiagnosticEntry {
2674 range,
2675 diagnostic: Diagnostic {
2676 code: code.clone(),
2677 severity: DiagnosticSeverity::INFORMATION,
2678 message: info.message.clone(),
2679 group_id,
2680 is_primary: false,
2681 is_valid: true,
2682 is_disk_based,
2683 is_unnecessary: false,
2684 },
2685 });
2686 }
2687 }
2688 }
2689 }
2690 }
2691
2692 for entry in &mut diagnostics {
2693 let diagnostic = &mut entry.diagnostic;
2694 if !diagnostic.is_primary {
2695 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2696 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2697 source,
2698 diagnostic.code.clone(),
2699 entry.range.clone(),
2700 )) {
2701 if let Some(severity) = severity {
2702 diagnostic.severity = severity;
2703 }
2704 diagnostic.is_unnecessary = is_unnecessary;
2705 }
2706 }
2707 }
2708
2709 self.update_diagnostic_entries(
2710 language_server_id,
2711 abs_path,
2712 params.version,
2713 diagnostics,
2714 cx,
2715 )?;
2716 Ok(())
2717 }
2718
2719 pub fn update_diagnostic_entries(
2720 &mut self,
2721 language_server_id: usize,
2722 abs_path: PathBuf,
2723 version: Option<i32>,
2724 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2725 cx: &mut ModelContext<Project>,
2726 ) -> Result<(), anyhow::Error> {
2727 let (worktree, relative_path) = self
2728 .find_local_worktree(&abs_path, cx)
2729 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2730
2731 let project_path = ProjectPath {
2732 worktree_id: worktree.read(cx).id(),
2733 path: relative_path.into(),
2734 };
2735 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2736 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2737 }
2738
2739 let updated = worktree.update(cx, |worktree, cx| {
2740 worktree
2741 .as_local_mut()
2742 .ok_or_else(|| anyhow!("not a local worktree"))?
2743 .update_diagnostics(
2744 language_server_id,
2745 project_path.path.clone(),
2746 diagnostics,
2747 cx,
2748 )
2749 })?;
2750 if updated {
2751 cx.emit(Event::DiagnosticsUpdated {
2752 language_server_id,
2753 path: project_path,
2754 });
2755 }
2756 Ok(())
2757 }
2758
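    // Installs diagnostics on an open buffer: entries are sorted, disk-based entries are mapped
    // through any unsaved edits, every range is clipped to the buffer snapshot (expanding empty
    // ranges by one character), and the resulting set replaces the buffer's diagnostics.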
2759 fn update_buffer_diagnostics(
2760 &mut self,
2761 buffer: &ModelHandle<Buffer>,
2762 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2763 version: Option<i32>,
2764 cx: &mut ModelContext<Self>,
2765 ) -> Result<()> {
2766 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2767 Ordering::Equal
2768 .then_with(|| b.is_primary.cmp(&a.is_primary))
2769 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2770 .then_with(|| a.severity.cmp(&b.severity))
2771 .then_with(|| a.message.cmp(&b.message))
2772 }
2773
2774 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2775
2776 diagnostics.sort_unstable_by(|a, b| {
2777 Ordering::Equal
2778 .then_with(|| a.range.start.cmp(&b.range.start))
2779 .then_with(|| b.range.end.cmp(&a.range.end))
2780 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2781 });
2782
2783 let mut sanitized_diagnostics = Vec::new();
2784 let edits_since_save = Patch::new(
2785 snapshot
2786 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2787 .collect(),
2788 );
2789 for entry in diagnostics {
2790 let start;
2791 let end;
2792 if entry.diagnostic.is_disk_based {
2793 // Some diagnostics are based on files on disk instead of buffers'
2794 // current contents. Adjust these diagnostics' ranges to reflect
2795 // any unsaved edits.
2796 start = edits_since_save.old_to_new(entry.range.start);
2797 end = edits_since_save.old_to_new(entry.range.end);
2798 } else {
2799 start = entry.range.start;
2800 end = entry.range.end;
2801 }
2802
2803 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2804 ..snapshot.clip_point_utf16(end, Bias::Right);
2805
2806 // Expand empty ranges by one character
2807 if range.start == range.end {
2808 range.end.column += 1;
2809 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2810 if range.start == range.end && range.end.column > 0 {
2811 range.start.column -= 1;
2812 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2813 }
2814 }
2815
2816 sanitized_diagnostics.push(DiagnosticEntry {
2817 range,
2818 diagnostic: entry.diagnostic,
2819 });
2820 }
2821 drop(edits_since_save);
2822
2823 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2824 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2825 Ok(())
2826 }
2827
2828 pub fn reload_buffers(
2829 &self,
2830 buffers: HashSet<ModelHandle<Buffer>>,
2831 push_to_history: bool,
2832 cx: &mut ModelContext<Self>,
2833 ) -> Task<Result<ProjectTransaction>> {
2834 let mut local_buffers = Vec::new();
2835 let mut remote_buffers = None;
2836 for buffer_handle in buffers {
2837 let buffer = buffer_handle.read(cx);
2838 if buffer.is_dirty() {
2839 if let Some(file) = File::from_dyn(buffer.file()) {
2840 if file.is_local() {
2841 local_buffers.push(buffer_handle);
2842 } else {
2843 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2844 }
2845 }
2846 }
2847 }
2848
2849 let remote_buffers = self.remote_id().zip(remote_buffers);
2850 let client = self.client.clone();
2851
2852 cx.spawn(|this, mut cx| async move {
2853 let mut project_transaction = ProjectTransaction::default();
2854
2855 if let Some((project_id, remote_buffers)) = remote_buffers {
2856 let response = client
2857 .request(proto::ReloadBuffers {
2858 project_id,
2859 buffer_ids: remote_buffers
2860 .iter()
2861 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2862 .collect(),
2863 })
2864 .await?
2865 .transaction
2866 .ok_or_else(|| anyhow!("missing transaction"))?;
2867 project_transaction = this
2868 .update(&mut cx, |this, cx| {
2869 this.deserialize_project_transaction(response, push_to_history, cx)
2870 })
2871 .await?;
2872 }
2873
2874 for buffer in local_buffers {
2875 let transaction = buffer
2876 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2877 .await?;
2878 buffer.update(&mut cx, |buffer, cx| {
2879 if let Some(transaction) = transaction {
2880 if !push_to_history {
2881 buffer.forget_transaction(transaction.id);
2882 }
2883 project_transaction.0.insert(cx.handle(), transaction);
2884 }
2885 });
2886 }
2887
2888 Ok(project_transaction)
2889 })
2890 }
2891
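    // Formats the given buffers. Remote buffers are formatted via a request to the project
    // host; local buffers use the language server's document formatting (falling back to
    // whole-buffer range formatting) and apply the returned edits as one transaction per buffer.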
2892 pub fn format(
2893 &self,
2894 buffers: HashSet<ModelHandle<Buffer>>,
2895 push_to_history: bool,
2896 cx: &mut ModelContext<Project>,
2897 ) -> Task<Result<ProjectTransaction>> {
2898 let mut local_buffers = Vec::new();
2899 let mut remote_buffers = None;
2900 for buffer_handle in buffers {
2901 let buffer = buffer_handle.read(cx);
2902 if let Some(file) = File::from_dyn(buffer.file()) {
2903 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2904 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2905 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2906 }
2907 } else {
2908 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2909 }
2910 } else {
2911 return Task::ready(Ok(Default::default()));
2912 }
2913 }
2914
2915 let remote_buffers = self.remote_id().zip(remote_buffers);
2916 let client = self.client.clone();
2917
2918 cx.spawn(|this, mut cx| async move {
2919 let mut project_transaction = ProjectTransaction::default();
2920
2921 if let Some((project_id, remote_buffers)) = remote_buffers {
2922 let response = client
2923 .request(proto::FormatBuffers {
2924 project_id,
2925 buffer_ids: remote_buffers
2926 .iter()
2927 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2928 .collect(),
2929 })
2930 .await?
2931 .transaction
2932 .ok_or_else(|| anyhow!("missing transaction"))?;
2933 project_transaction = this
2934 .update(&mut cx, |this, cx| {
2935 this.deserialize_project_transaction(response, push_to_history, cx)
2936 })
2937 .await?;
2938 }
2939
2940 for (buffer, buffer_abs_path, language_server) in local_buffers {
2941 let text_document = lsp::TextDocumentIdentifier::new(
2942 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2943 );
2944 let capabilities = &language_server.capabilities();
2945 let tab_size = cx.update(|cx| {
2946 let language_name = buffer.read(cx).language().map(|language| language.name());
2947 cx.global::<Settings>().tab_size(language_name.as_deref())
2948 });
2949 let lsp_edits = if capabilities
2950 .document_formatting_provider
2951 .as_ref()
2952 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2953 {
2954 language_server
2955 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2956 text_document,
2957 options: lsp::FormattingOptions {
2958 tab_size: tab_size.into(),
2959 insert_spaces: true,
2960 insert_final_newline: Some(true),
2961 ..Default::default()
2962 },
2963 work_done_progress_params: Default::default(),
2964 })
2965 .await?
2966 } else if capabilities
2967 .document_range_formatting_provider
2968 .as_ref()
2969 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2970 {
2971 let buffer_start = lsp::Position::new(0, 0);
2972 let buffer_end =
2973 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2974 language_server
2975 .request::<lsp::request::RangeFormatting>(
2976 lsp::DocumentRangeFormattingParams {
2977 text_document,
2978 range: lsp::Range::new(buffer_start, buffer_end),
2979 options: lsp::FormattingOptions {
2980 tab_size: tab_size.into(),
2981 insert_spaces: true,
2982 insert_final_newline: Some(true),
2983 ..Default::default()
2984 },
2985 work_done_progress_params: Default::default(),
2986 },
2987 )
2988 .await?
2989 } else {
2990 continue;
2991 };
2992
2993 if let Some(lsp_edits) = lsp_edits {
2994 let edits = this
2995 .update(&mut cx, |this, cx| {
2996 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2997 })
2998 .await?;
2999 buffer.update(&mut cx, |buffer, cx| {
3000 buffer.finalize_last_transaction();
3001 buffer.start_transaction();
3002 for (range, text) in edits {
3003 buffer.edit([(range, text)], cx);
3004 }
3005 if buffer.end_transaction(cx).is_some() {
3006 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3007 if !push_to_history {
3008 buffer.forget_transaction(transaction.id);
3009 }
3010 project_transaction.0.insert(cx.handle(), transaction);
3011 }
3012 });
3013 }
3014 }
3015
3016 Ok(project_transaction)
3017 })
3018 }
3019
3020 pub fn definition<T: ToPointUtf16>(
3021 &self,
3022 buffer: &ModelHandle<Buffer>,
3023 position: T,
3024 cx: &mut ModelContext<Self>,
3025 ) -> Task<Result<Vec<LocationLink>>> {
3026 let position = position.to_point_utf16(buffer.read(cx));
3027 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3028 }
3029
3030 pub fn references<T: ToPointUtf16>(
3031 &self,
3032 buffer: &ModelHandle<Buffer>,
3033 position: T,
3034 cx: &mut ModelContext<Self>,
3035 ) -> Task<Result<Vec<Location>>> {
3036 let position = position.to_point_utf16(buffer.read(cx));
3037 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3038 }
3039
3040 pub fn document_highlights<T: ToPointUtf16>(
3041 &self,
3042 buffer: &ModelHandle<Buffer>,
3043 position: T,
3044 cx: &mut ModelContext<Self>,
3045 ) -> Task<Result<Vec<DocumentHighlight>>> {
3046 let position = position.to_point_utf16(buffer.read(cx));
3047
3048 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3049 }
3050
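    // Runs a workspace-symbol query against every local language server (or forwards it to the
    // project host), converting each LSP symbol into a project `Symbol` resolved to a
    // worktree-relative path where possible.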
3051 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3052 if self.is_local() {
3053 let mut requests = Vec::new();
3054 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
3055 let worktree_id = *worktree_id;
3056 if let Some(worktree) = self
3057 .worktree_for_id(worktree_id, cx)
3058 .and_then(|worktree| worktree.read(cx).as_local())
3059 {
3060 let lsp_adapter = lsp_adapter.clone();
3061 let worktree_abs_path = worktree.abs_path().clone();
3062 requests.push(
3063 language_server
3064 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
3065 query: query.to_string(),
3066 ..Default::default()
3067 })
3068 .log_err()
3069 .map(move |response| {
3070 (
3071 lsp_adapter,
3072 worktree_id,
3073 worktree_abs_path,
3074 response.unwrap_or_default(),
3075 )
3076 }),
3077 );
3078 }
3079 }
3080
3081 cx.spawn_weak(|this, cx| async move {
3082 let responses = futures::future::join_all(requests).await;
3083 let this = if let Some(this) = this.upgrade(&cx) {
3084 this
3085 } else {
3086 return Ok(Default::default());
3087 };
3088 this.read_with(&cx, |this, cx| {
3089 let mut symbols = Vec::new();
3090 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3091 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3092 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3093 let mut worktree_id = source_worktree_id;
3094 let path;
3095 if let Some((worktree, rel_path)) =
3096 this.find_local_worktree(&abs_path, cx)
3097 {
3098 worktree_id = worktree.read(cx).id();
3099 path = rel_path;
3100 } else {
3101 path = relativize_path(&worktree_abs_path, &abs_path);
3102 }
3103
3104 let label = this
3105 .languages
3106 .select_language(&path)
3107 .and_then(|language| {
3108 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3109 })
3110 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3111 let signature = this.symbol_signature(worktree_id, &path);
3112
3113 Some(Symbol {
3114 source_worktree_id,
3115 worktree_id,
3116 language_server_name: adapter.name(),
3117 name: lsp_symbol.name,
3118 kind: lsp_symbol.kind,
3119 label,
3120 path,
3121 range: range_from_lsp(lsp_symbol.location.range),
3122 signature,
3123 })
3124 }));
3125 }
3126 Ok(symbols)
3127 })
3128 })
3129 } else if let Some(project_id) = self.remote_id() {
3130 let request = self.client.request(proto::GetProjectSymbols {
3131 project_id,
3132 query: query.to_string(),
3133 });
3134 cx.spawn_weak(|this, cx| async move {
3135 let response = request.await?;
3136 let mut symbols = Vec::new();
3137 if let Some(this) = this.upgrade(&cx) {
3138 this.read_with(&cx, |this, _| {
3139 symbols.extend(
3140 response
3141 .symbols
3142 .into_iter()
3143 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3144 );
3145 })
3146 }
3147 Ok(symbols)
3148 })
3149 } else {
3150 Task::ready(Ok(Default::default()))
3151 }
3152 }
3153
3154 pub fn open_buffer_for_symbol(
3155 &mut self,
3156 symbol: &Symbol,
3157 cx: &mut ModelContext<Self>,
3158 ) -> Task<Result<ModelHandle<Buffer>>> {
3159 if self.is_local() {
3160 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3161 symbol.source_worktree_id,
3162 symbol.language_server_name.clone(),
3163 )) {
3164 server.clone()
3165 } else {
3166 return Task::ready(Err(anyhow!(
3167 "language server for worktree and language not found"
3168 )));
3169 };
3170
3171 let worktree_abs_path = if let Some(worktree_abs_path) = self
3172 .worktree_for_id(symbol.worktree_id, cx)
3173 .and_then(|worktree| worktree.read(cx).as_local())
3174 .map(|local_worktree| local_worktree.abs_path())
3175 {
3176 worktree_abs_path
3177 } else {
3178 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3179 };
3180 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3181 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3182 uri
3183 } else {
3184 return Task::ready(Err(anyhow!("invalid symbol path")));
3185 };
3186
3187 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3188 } else if let Some(project_id) = self.remote_id() {
3189 let request = self.client.request(proto::OpenBufferForSymbol {
3190 project_id,
3191 symbol: Some(serialize_symbol(symbol)),
3192 });
3193 cx.spawn(|this, mut cx| async move {
3194 let response = request.await?;
3195 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3196 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3197 .await
3198 })
3199 } else {
3200 Task::ready(Err(anyhow!("project does not have a remote id")))
3201 }
3202 }
3203
3204 pub fn hover<T: ToPointUtf16>(
3205 &self,
3206 buffer: &ModelHandle<Buffer>,
3207 position: T,
3208 cx: &mut ModelContext<Self>,
3209 ) -> Task<Result<Option<Hover>>> {
3210 let position = position.to_point_utf16(buffer.read(cx));
3211 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3212 }
3213
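    // Requests completions at `position`. Locally this issues `textDocument/completion` and
    // converts each item into an anchored `Completion`, dropping items it can't currently
    // apply; on remote projects the request is proxied to the host.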
3214 pub fn completions<T: ToPointUtf16>(
3215 &self,
3216 source_buffer_handle: &ModelHandle<Buffer>,
3217 position: T,
3218 cx: &mut ModelContext<Self>,
3219 ) -> Task<Result<Vec<Completion>>> {
3220 let source_buffer_handle = source_buffer_handle.clone();
3221 let source_buffer = source_buffer_handle.read(cx);
3222 let buffer_id = source_buffer.remote_id();
3223 let language = source_buffer.language().cloned();
3224 let worktree;
3225 let buffer_abs_path;
3226 if let Some(file) = File::from_dyn(source_buffer.file()) {
3227 worktree = file.worktree.clone();
3228 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3229 } else {
3230 return Task::ready(Ok(Default::default()));
3231 };
3232
3233 let position = position.to_point_utf16(source_buffer);
3234 let anchor = source_buffer.anchor_after(position);
3235
3236 if worktree.read(cx).as_local().is_some() {
3237 let buffer_abs_path = buffer_abs_path.unwrap();
3238 let (_, lang_server) =
3239 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3240 server.clone()
3241 } else {
3242 return Task::ready(Ok(Default::default()));
3243 };
3244
3245 cx.spawn(|_, cx| async move {
3246 let completions = lang_server
3247 .request::<lsp::request::Completion>(lsp::CompletionParams {
3248 text_document_position: lsp::TextDocumentPositionParams::new(
3249 lsp::TextDocumentIdentifier::new(
3250 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3251 ),
3252 point_to_lsp(position),
3253 ),
3254 context: Default::default(),
3255 work_done_progress_params: Default::default(),
3256 partial_result_params: Default::default(),
3257 })
3258 .await
3259 .context("lsp completion request failed")?;
3260
3261 let completions = if let Some(completions) = completions {
3262 match completions {
3263 lsp::CompletionResponse::Array(completions) => completions,
3264 lsp::CompletionResponse::List(list) => list.items,
3265 }
3266 } else {
3267 Default::default()
3268 };
3269
3270 source_buffer_handle.read_with(&cx, |this, _| {
3271 let snapshot = this.snapshot();
3272 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3273 let mut range_for_token = None;
3274 Ok(completions
3275 .into_iter()
3276 .filter_map(|lsp_completion| {
3277 // For now, we can only handle additional edits if they are returned
3278 // when resolving the completion, not if they are present initially.
3279 if lsp_completion
3280 .additional_text_edits
3281 .as_ref()
3282 .map_or(false, |edits| !edits.is_empty())
3283 {
3284 return None;
3285 }
3286
3287 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3288 // If the language server provides a range to overwrite, then
3289 // check that the range is valid.
3290 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3291 let range = range_from_lsp(edit.range);
3292 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3293 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3294 if start != range.start || end != range.end {
3295 log::info!("completion out of expected range");
3296 return None;
3297 }
3298 (
3299 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3300 edit.new_text.clone(),
3301 )
3302 }
                                // If the language server does not provide a range, then infer
                                // the range from the word surrounding the cursor position.
3305 None => {
3306 if position != clipped_position {
3307 log::info!("completion out of expected range");
3308 return None;
3309 }
3310 let Range { start, end } = range_for_token
3311 .get_or_insert_with(|| {
3312 let offset = position.to_offset(&snapshot);
3313 let (range, kind) = snapshot.surrounding_word(offset);
3314 if kind == Some(CharKind::Word) {
3315 range
3316 } else {
3317 offset..offset
3318 }
3319 })
3320 .clone();
3321 let text = lsp_completion
3322 .insert_text
3323 .as_ref()
3324 .unwrap_or(&lsp_completion.label)
3325 .clone();
3326 (
3327 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3328 text.clone(),
3329 )
3330 }
3331 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3332 log::info!("unsupported insert/replace completion");
3333 return None;
3334 }
3335 };
3336
3337 Some(Completion {
3338 old_range,
3339 new_text,
3340 label: language
3341 .as_ref()
3342 .and_then(|l| l.label_for_completion(&lsp_completion))
3343 .unwrap_or_else(|| {
3344 CodeLabel::plain(
3345 lsp_completion.label.clone(),
3346 lsp_completion.filter_text.as_deref(),
3347 )
3348 }),
3349 lsp_completion,
3350 })
3351 })
3352 .collect())
3353 })
3354 })
3355 } else if let Some(project_id) = self.remote_id() {
3356 let rpc = self.client.clone();
3357 let message = proto::GetCompletions {
3358 project_id,
3359 buffer_id,
3360 position: Some(language::proto::serialize_anchor(&anchor)),
3361 version: serialize_version(&source_buffer.version()),
3362 };
3363 cx.spawn_weak(|_, mut cx| async move {
3364 let response = rpc.request(message).await?;
3365
3366 source_buffer_handle
3367 .update(&mut cx, |buffer, _| {
3368 buffer.wait_for_version(deserialize_version(response.version))
3369 })
3370 .await;
3371
3372 response
3373 .completions
3374 .into_iter()
3375 .map(|completion| {
3376 language::proto::deserialize_completion(completion, language.as_ref())
3377 })
3378 .collect()
3379 })
3380 } else {
3381 Task::ready(Ok(Default::default()))
3382 }
3383 }
3384
3385 pub fn apply_additional_edits_for_completion(
3386 &self,
3387 buffer_handle: ModelHandle<Buffer>,
3388 completion: Completion,
3389 push_to_history: bool,
3390 cx: &mut ModelContext<Self>,
3391 ) -> Task<Result<Option<Transaction>>> {
3392 let buffer = buffer_handle.read(cx);
3393 let buffer_id = buffer.remote_id();
3394
3395 if self.is_local() {
3396 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3397 {
3398 server.clone()
3399 } else {
3400 return Task::ready(Ok(Default::default()));
3401 };
3402
3403 cx.spawn(|this, mut cx| async move {
3404 let resolved_completion = lang_server
3405 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3406 .await?;
3407 if let Some(edits) = resolved_completion.additional_text_edits {
3408 let edits = this
3409 .update(&mut cx, |this, cx| {
3410 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3411 })
3412 .await?;
3413 buffer_handle.update(&mut cx, |buffer, cx| {
3414 buffer.finalize_last_transaction();
3415 buffer.start_transaction();
3416 for (range, text) in edits {
3417 buffer.edit([(range, text)], cx);
3418 }
3419 let transaction = if buffer.end_transaction(cx).is_some() {
3420 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3421 if !push_to_history {
3422 buffer.forget_transaction(transaction.id);
3423 }
3424 Some(transaction)
3425 } else {
3426 None
3427 };
3428 Ok(transaction)
3429 })
3430 } else {
3431 Ok(None)
3432 }
3433 })
3434 } else if let Some(project_id) = self.remote_id() {
3435 let client = self.client.clone();
3436 cx.spawn(|_, mut cx| async move {
3437 let response = client
3438 .request(proto::ApplyCompletionAdditionalEdits {
3439 project_id,
3440 buffer_id,
3441 completion: Some(language::proto::serialize_completion(&completion)),
3442 })
3443 .await?;
3444
3445 if let Some(transaction) = response.transaction {
3446 let transaction = language::proto::deserialize_transaction(transaction)?;
3447 buffer_handle
3448 .update(&mut cx, |buffer, _| {
3449 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3450 })
3451 .await;
3452 if push_to_history {
3453 buffer_handle.update(&mut cx, |buffer, _| {
3454 buffer.push_transaction(transaction.clone(), Instant::now());
3455 });
3456 }
3457 Ok(Some(transaction))
3458 } else {
3459 Ok(None)
3460 }
3461 })
3462 } else {
3463 Task::ready(Err(anyhow!("project does not have a remote id")))
3464 }
3465 }
3466
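    // Fetches code actions for `range`, passing along the overlapping diagnostics as context.
    // Remote projects forward the request to the host and wait for the buffer to catch up to
    // the host's version before deserializing the actions.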
3467 pub fn code_actions<T: Clone + ToOffset>(
3468 &self,
3469 buffer_handle: &ModelHandle<Buffer>,
3470 range: Range<T>,
3471 cx: &mut ModelContext<Self>,
3472 ) -> Task<Result<Vec<CodeAction>>> {
3473 let buffer_handle = buffer_handle.clone();
3474 let buffer = buffer_handle.read(cx);
3475 let snapshot = buffer.snapshot();
3476 let relevant_diagnostics = snapshot
3477 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3478 .map(|entry| entry.to_lsp_diagnostic_stub())
3479 .collect();
3480 let buffer_id = buffer.remote_id();
3481 let worktree;
3482 let buffer_abs_path;
3483 if let Some(file) = File::from_dyn(buffer.file()) {
3484 worktree = file.worktree.clone();
3485 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3486 } else {
3487 return Task::ready(Ok(Default::default()));
3488 };
3489 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3490
3491 if worktree.read(cx).as_local().is_some() {
3492 let buffer_abs_path = buffer_abs_path.unwrap();
3493 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3494 {
3495 server.clone()
3496 } else {
3497 return Task::ready(Ok(Default::default()));
3498 };
3499
3500 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3501 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3503 return Ok(Default::default());
3504 }
3505
3506 Ok(lang_server
3507 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3508 text_document: lsp::TextDocumentIdentifier::new(
3509 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3510 ),
3511 range: lsp_range,
3512 work_done_progress_params: Default::default(),
3513 partial_result_params: Default::default(),
3514 context: lsp::CodeActionContext {
3515 diagnostics: relevant_diagnostics,
3516 only: Some(vec![
3517 lsp::CodeActionKind::QUICKFIX,
3518 lsp::CodeActionKind::REFACTOR,
3519 lsp::CodeActionKind::REFACTOR_EXTRACT,
3520 lsp::CodeActionKind::SOURCE,
3521 ]),
3522 },
3523 })
3524 .await?
3525 .unwrap_or_default()
3526 .into_iter()
3527 .filter_map(|entry| {
3528 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3529 Some(CodeAction {
3530 range: range.clone(),
3531 lsp_action,
3532 })
3533 } else {
3534 None
3535 }
3536 })
3537 .collect())
3538 })
3539 } else if let Some(project_id) = self.remote_id() {
3540 let rpc = self.client.clone();
3541 let version = buffer.version();
3542 cx.spawn_weak(|_, mut cx| async move {
3543 let response = rpc
3544 .request(proto::GetCodeActions {
3545 project_id,
3546 buffer_id,
3547 start: Some(language::proto::serialize_anchor(&range.start)),
3548 end: Some(language::proto::serialize_anchor(&range.end)),
3549 version: serialize_version(&version),
3550 })
3551 .await?;
3552
3553 buffer_handle
3554 .update(&mut cx, |buffer, _| {
3555 buffer.wait_for_version(deserialize_version(response.version))
3556 })
3557 .await;
3558
3559 response
3560 .actions
3561 .into_iter()
3562 .map(language::proto::deserialize_code_action)
3563 .collect()
3564 })
3565 } else {
3566 Task::ready(Ok(Default::default()))
3567 }
3568 }
3569
3570 pub fn apply_code_action(
3571 &self,
3572 buffer_handle: ModelHandle<Buffer>,
3573 mut action: CodeAction,
3574 push_to_history: bool,
3575 cx: &mut ModelContext<Self>,
3576 ) -> Task<Result<ProjectTransaction>> {
3577 if self.is_local() {
3578 let buffer = buffer_handle.read(cx);
3579 let (lsp_adapter, lang_server) =
3580 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3581 server.clone()
3582 } else {
3583 return Task::ready(Ok(Default::default()));
3584 };
3585 let range = action.range.to_point_utf16(buffer);
3586
3587 cx.spawn(|this, mut cx| async move {
3588 if let Some(lsp_range) = action
3589 .lsp_action
3590 .data
3591 .as_mut()
3592 .and_then(|d| d.get_mut("codeActionParams"))
3593 .and_then(|d| d.get_mut("range"))
3594 {
3595 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3596 action.lsp_action = lang_server
3597 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3598 .await?;
3599 } else {
3600 let actions = this
3601 .update(&mut cx, |this, cx| {
3602 this.code_actions(&buffer_handle, action.range, cx)
3603 })
3604 .await?;
3605 action.lsp_action = actions
3606 .into_iter()
3607 .find(|a| a.lsp_action.title == action.lsp_action.title)
3608 .ok_or_else(|| anyhow!("code action is outdated"))?
3609 .lsp_action;
3610 }
3611
3612 if let Some(edit) = action.lsp_action.edit {
3613 Self::deserialize_workspace_edit(
3614 this,
3615 edit,
3616 push_to_history,
3617 lsp_adapter,
3618 lang_server,
3619 &mut cx,
3620 )
3621 .await
3622 } else if let Some(command) = action.lsp_action.command {
3623 this.update(&mut cx, |this, _| {
3624 this.last_workspace_edits_by_language_server
3625 .remove(&lang_server.server_id());
3626 });
3627 lang_server
3628 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3629 command: command.command,
3630 arguments: command.arguments.unwrap_or_default(),
3631 ..Default::default()
3632 })
3633 .await?;
3634 Ok(this.update(&mut cx, |this, _| {
3635 this.last_workspace_edits_by_language_server
3636 .remove(&lang_server.server_id())
3637 .unwrap_or_default()
3638 }))
3639 } else {
3640 Ok(ProjectTransaction::default())
3641 }
3642 })
3643 } else if let Some(project_id) = self.remote_id() {
3644 let client = self.client.clone();
3645 let request = proto::ApplyCodeAction {
3646 project_id,
3647 buffer_id: buffer_handle.read(cx).remote_id(),
3648 action: Some(language::proto::serialize_code_action(&action)),
3649 };
3650 cx.spawn(|this, mut cx| async move {
3651 let response = client
3652 .request(request)
3653 .await?
3654 .transaction
3655 .ok_or_else(|| anyhow!("missing transaction"))?;
3656 this.update(&mut cx, |this, cx| {
3657 this.deserialize_project_transaction(response, push_to_history, cx)
3658 })
3659 .await
3660 })
3661 } else {
3662 Task::ready(Err(anyhow!("project does not have a remote id")))
3663 }
3664 }
3665
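    // Applies an LSP workspace edit: resource operations (create/rename/delete) go through the
    // project's `Fs`, text edits are applied to buffers opened via the originating language
    // server, and one transaction per edited buffer is collected into a `ProjectTransaction`.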
3666 async fn deserialize_workspace_edit(
3667 this: ModelHandle<Self>,
3668 edit: lsp::WorkspaceEdit,
3669 push_to_history: bool,
3670 lsp_adapter: Arc<dyn LspAdapter>,
3671 language_server: Arc<LanguageServer>,
3672 cx: &mut AsyncAppContext,
3673 ) -> Result<ProjectTransaction> {
3674 let fs = this.read_with(cx, |this, _| this.fs.clone());
3675 let mut operations = Vec::new();
3676 if let Some(document_changes) = edit.document_changes {
3677 match document_changes {
3678 lsp::DocumentChanges::Edits(edits) => {
3679 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3680 }
3681 lsp::DocumentChanges::Operations(ops) => operations = ops,
3682 }
3683 } else if let Some(changes) = edit.changes {
3684 operations.extend(changes.into_iter().map(|(uri, edits)| {
3685 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3686 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3687 uri,
3688 version: None,
3689 },
3690 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3691 })
3692 }));
3693 }
3694
3695 let mut project_transaction = ProjectTransaction::default();
3696 for operation in operations {
3697 match operation {
3698 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3699 let abs_path = op
3700 .uri
3701 .to_file_path()
3702 .map_err(|_| anyhow!("can't convert URI to path"))?;
3703
3704 if let Some(parent_path) = abs_path.parent() {
3705 fs.create_dir(parent_path).await?;
3706 }
3707 if abs_path.ends_with("/") {
3708 fs.create_dir(&abs_path).await?;
3709 } else {
3710 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3711 .await?;
3712 }
3713 }
3714 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3715 let source_abs_path = op
3716 .old_uri
3717 .to_file_path()
3718 .map_err(|_| anyhow!("can't convert URI to path"))?;
3719 let target_abs_path = op
3720 .new_uri
3721 .to_file_path()
3722 .map_err(|_| anyhow!("can't convert URI to path"))?;
3723 fs.rename(
3724 &source_abs_path,
3725 &target_abs_path,
3726 op.options.map(Into::into).unwrap_or_default(),
3727 )
3728 .await?;
3729 }
3730 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3731 let abs_path = op
3732 .uri
3733 .to_file_path()
3734 .map_err(|_| anyhow!("can't convert URI to path"))?;
3735 let options = op.options.map(Into::into).unwrap_or_default();
3736 if abs_path.ends_with("/") {
3737 fs.remove_dir(&abs_path, options).await?;
3738 } else {
3739 fs.remove_file(&abs_path, options).await?;
3740 }
3741 }
3742 lsp::DocumentChangeOperation::Edit(op) => {
3743 let buffer_to_edit = this
3744 .update(cx, |this, cx| {
3745 this.open_local_buffer_via_lsp(
3746 op.text_document.uri,
3747 lsp_adapter.clone(),
3748 language_server.clone(),
3749 cx,
3750 )
3751 })
3752 .await?;
3753
3754 let edits = this
3755 .update(cx, |this, cx| {
3756 let edits = op.edits.into_iter().map(|edit| match edit {
3757 lsp::OneOf::Left(edit) => edit,
3758 lsp::OneOf::Right(edit) => edit.text_edit,
3759 });
3760 this.edits_from_lsp(
3761 &buffer_to_edit,
3762 edits,
3763 op.text_document.version,
3764 cx,
3765 )
3766 })
3767 .await?;
3768
3769 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3770 buffer.finalize_last_transaction();
3771 buffer.start_transaction();
3772 for (range, text) in edits {
3773 buffer.edit([(range, text)], cx);
3774 }
3775 let transaction = if buffer.end_transaction(cx).is_some() {
3776 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3777 if !push_to_history {
3778 buffer.forget_transaction(transaction.id);
3779 }
3780 Some(transaction)
3781 } else {
3782 None
3783 };
3784
3785 transaction
3786 });
3787 if let Some(transaction) = transaction {
3788 project_transaction.0.insert(buffer_to_edit, transaction);
3789 }
3790 }
3791 }
3792 }
3793
3794 Ok(project_transaction)
3795 }
3796
3797 pub fn prepare_rename<T: ToPointUtf16>(
3798 &self,
3799 buffer: ModelHandle<Buffer>,
3800 position: T,
3801 cx: &mut ModelContext<Self>,
3802 ) -> Task<Result<Option<Range<Anchor>>>> {
3803 let position = position.to_point_utf16(buffer.read(cx));
3804 self.request_lsp(buffer, PrepareRename { position }, cx)
3805 }
3806
3807 pub fn perform_rename<T: ToPointUtf16>(
3808 &self,
3809 buffer: ModelHandle<Buffer>,
3810 position: T,
3811 new_name: String,
3812 push_to_history: bool,
3813 cx: &mut ModelContext<Self>,
3814 ) -> Task<Result<ProjectTransaction>> {
3815 let position = position.to_point_utf16(buffer.read(cx));
3816 self.request_lsp(
3817 buffer,
3818 PerformRename {
3819 position,
3820 new_name,
3821 push_to_history,
3822 },
3823 cx,
3824 )
3825 }
3826
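    // Searches the local project: background workers scan every visible file for candidate
    // matches, matching paths are opened as buffers, and their snapshots are streamed through
    // `buffers_tx` so the actual match ranges can be computed per buffer.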
3827 pub fn search(
3828 &self,
3829 query: SearchQuery,
3830 cx: &mut ModelContext<Self>,
3831 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3832 if self.is_local() {
3833 let snapshots = self
3834 .visible_worktrees(cx)
3835 .filter_map(|tree| {
3836 let tree = tree.read(cx).as_local()?;
3837 Some(tree.snapshot())
3838 })
3839 .collect::<Vec<_>>();
3840
3841 let background = cx.background().clone();
3842 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3843 if path_count == 0 {
3844 return Task::ready(Ok(Default::default()));
3845 }
3846 let workers = background.num_cpus().min(path_count);
3847 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3848 cx.background()
3849 .spawn({
3850 let fs = self.fs.clone();
3851 let background = cx.background().clone();
3852 let query = query.clone();
3853 async move {
3854 let fs = &fs;
3855 let query = &query;
3856 let matching_paths_tx = &matching_paths_tx;
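                        // Divide the visible paths evenly among the workers; each worker scans
                        // its slice of every worktree snapshot and sends matching project paths
                        // down the channel.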
3857 let paths_per_worker = (path_count + workers - 1) / workers;
3858 let snapshots = &snapshots;
3859 background
3860 .scoped(|scope| {
3861 for worker_ix in 0..workers {
3862 let worker_start_ix = worker_ix * paths_per_worker;
3863 let worker_end_ix = worker_start_ix + paths_per_worker;
3864 scope.spawn(async move {
3865 let mut snapshot_start_ix = 0;
3866 let mut abs_path = PathBuf::new();
3867 for snapshot in snapshots {
3868 let snapshot_end_ix =
3869 snapshot_start_ix + snapshot.visible_file_count();
3870 if worker_end_ix <= snapshot_start_ix {
3871 break;
3872 } else if worker_start_ix > snapshot_end_ix {
3873 snapshot_start_ix = snapshot_end_ix;
3874 continue;
3875 } else {
3876 let start_in_snapshot = worker_start_ix
3877 .saturating_sub(snapshot_start_ix);
3878 let end_in_snapshot =
3879 cmp::min(worker_end_ix, snapshot_end_ix)
3880 - snapshot_start_ix;
3881
3882 for entry in snapshot
3883 .files(false, start_in_snapshot)
3884 .take(end_in_snapshot - start_in_snapshot)
3885 {
3886 if matching_paths_tx.is_closed() {
3887 break;
3888 }
3889
3890 abs_path.clear();
3891 abs_path.push(&snapshot.abs_path());
3892 abs_path.push(&entry.path);
3893 let matches = if let Some(file) =
3894 fs.open_sync(&abs_path).await.log_err()
3895 {
3896 query.detect(file).unwrap_or(false)
3897 } else {
3898 false
3899 };
3900
3901 if matches {
3902 let project_path =
3903 (snapshot.id(), entry.path.clone());
3904 if matching_paths_tx
3905 .send(project_path)
3906 .await
3907 .is_err()
3908 {
3909 break;
3910 }
3911 }
3912 }
3913
3914 snapshot_start_ix = snapshot_end_ix;
3915 }
3916 }
3917 });
3918 }
3919 })
3920 .await;
3921 }
3922 })
3923 .detach();
3924
3925 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3926 let open_buffers = self
3927 .opened_buffers
3928 .values()
3929 .filter_map(|b| b.upgrade(cx))
3930 .collect::<HashSet<_>>();
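            // Seed the matchers with buffers that are already open, then open a buffer for
            // every matching path, skipping buffers that were already sent.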
3931 cx.spawn(|this, cx| async move {
3932 for buffer in &open_buffers {
3933 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3934 buffers_tx.send((buffer.clone(), snapshot)).await?;
3935 }
3936
3937 let open_buffers = Rc::new(RefCell::new(open_buffers));
3938 while let Some(project_path) = matching_paths_rx.next().await {
3939 if buffers_tx.is_closed() {
3940 break;
3941 }
3942
3943 let this = this.clone();
3944 let open_buffers = open_buffers.clone();
3945 let buffers_tx = buffers_tx.clone();
3946 cx.spawn(|mut cx| async move {
3947 if let Some(buffer) = this
3948 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3949 .await
3950 .log_err()
3951 {
3952 if open_buffers.borrow_mut().insert(buffer.clone()) {
3953 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3954 buffers_tx.send((buffer, snapshot)).await?;
3955 }
3956 }
3957
3958 Ok::<_, anyhow::Error>(())
3959 })
3960 .detach();
3961 }
3962
3963 Ok::<_, anyhow::Error>(())
3964 })
3965 .detach_and_log_err(cx);
3966
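            // Each worker drains the buffer channel into its own map of matches; the
            // per-worker maps are merged into the final result once all buffers are searched.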
3967 let background = cx.background().clone();
3968 cx.background().spawn(async move {
3969 let query = &query;
3970 let mut matched_buffers = Vec::new();
3971 for _ in 0..workers {
3972 matched_buffers.push(HashMap::default());
3973 }
3974 background
3975 .scoped(|scope| {
3976 for worker_matched_buffers in matched_buffers.iter_mut() {
3977 let mut buffers_rx = buffers_rx.clone();
3978 scope.spawn(async move {
3979 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3980 let buffer_matches = query
3981 .search(snapshot.as_rope())
3982 .await
3983 .iter()
3984 .map(|range| {
3985 snapshot.anchor_before(range.start)
3986 ..snapshot.anchor_after(range.end)
3987 })
3988 .collect::<Vec<_>>();
3989 if !buffer_matches.is_empty() {
3990 worker_matched_buffers
3991 .insert(buffer.clone(), buffer_matches);
3992 }
3993 }
3994 });
3995 }
3996 })
3997 .await;
3998 Ok(matched_buffers.into_iter().flatten().collect())
3999 })
4000 } else if let Some(project_id) = self.remote_id() {
4001 let request = self.client.request(query.to_proto(project_id));
4002 cx.spawn(|this, mut cx| async move {
4003 let response = request.await?;
4004 let mut result = HashMap::default();
4005 for location in response.locations {
4006 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
4007 let target_buffer = this
4008 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4009 .await?;
4010 let start = location
4011 .start
4012 .and_then(deserialize_anchor)
4013 .ok_or_else(|| anyhow!("missing target start"))?;
4014 let end = location
4015 .end
4016 .and_then(deserialize_anchor)
4017 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
4022 }
4023 Ok(result)
4024 })
4025 } else {
4026 Task::ready(Ok(Default::default()))
4027 }
4028 }
4029
4030 fn request_lsp<R: LspCommand>(
4031 &self,
4032 buffer_handle: ModelHandle<Buffer>,
4033 request: R,
4034 cx: &mut ModelContext<Self>,
4035 ) -> Task<Result<R::Response>>
4036 where
4037 <R::LspRequest as lsp::request::Request>::Result: Send,
4038 {
4039 let buffer = buffer_handle.read(cx);
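        // For local projects, send the request directly to the buffer's language server;
        // for remote projects, forward it to the host over RPC.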
4040 if self.is_local() {
4041 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4042 if let Some((file, (_, language_server))) =
4043 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
4044 {
4045 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4046 return cx.spawn(|this, cx| async move {
4047 if !request.check_capabilities(&language_server.capabilities()) {
4048 return Ok(Default::default());
4049 }
4050
4051 let response = language_server
4052 .request::<R::LspRequest>(lsp_params)
4053 .await
4054 .context("lsp request failed")?;
4055 request
4056 .response_from_lsp(response, this, buffer_handle, cx)
4057 .await
4058 });
4059 }
4060 } else if let Some(project_id) = self.remote_id() {
4061 let rpc = self.client.clone();
4062 let message = request.to_proto(project_id, buffer);
4063 return cx.spawn(|this, cx| async move {
4064 let response = rpc.request(message).await?;
4065 request
4066 .response_from_proto(response, this, buffer_handle, cx)
4067 .await
4068 });
4069 }
4070 Task::ready(Ok(Default::default()))
4071 }
4072
4073 pub fn find_or_create_local_worktree(
4074 &mut self,
4075 abs_path: impl AsRef<Path>,
4076 visible: bool,
4077 cx: &mut ModelContext<Self>,
4078 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4079 let abs_path = abs_path.as_ref();
4080 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4081 Task::ready(Ok((tree.clone(), relative_path.into())))
4082 } else {
4083 let worktree = self.create_local_worktree(abs_path, visible, cx);
4084 cx.foreground()
4085 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4086 }
4087 }
4088
4089 pub fn find_local_worktree(
4090 &self,
4091 abs_path: &Path,
4092 cx: &AppContext,
4093 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4094 for tree in &self.worktrees {
4095 if let Some(tree) = tree.upgrade(cx) {
4096 if let Some(relative_path) = tree
4097 .read(cx)
4098 .as_local()
4099 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4100 {
4101 return Some((tree.clone(), relative_path.into()));
4102 }
4103 }
4104 }
4105 None
4106 }
4107
4108 pub fn is_shared(&self) -> bool {
4109 match &self.client_state {
4110 ProjectClientState::Local { is_shared, .. } => *is_shared,
4111 ProjectClientState::Remote { .. } => false,
4112 }
4113 }
4114
4115 fn create_local_worktree(
4116 &mut self,
4117 abs_path: impl AsRef<Path>,
4118 visible: bool,
4119 cx: &mut ModelContext<Self>,
4120 ) -> Task<Result<ModelHandle<Worktree>>> {
4121 let fs = self.fs.clone();
4122 let client = self.client.clone();
4123 let next_entry_id = self.next_entry_id.clone();
4124 let path: Arc<Path> = abs_path.as_ref().into();
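        // Share a single in-flight task per path so concurrent requests for the same
        // worktree don't load it twice.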
4125 let task = self
4126 .loading_local_worktrees
4127 .entry(path.clone())
4128 .or_insert_with(|| {
4129 cx.spawn(|project, mut cx| {
4130 async move {
4131 let worktree = Worktree::local(
4132 client.clone(),
4133 path.clone(),
4134 visible,
4135 fs,
4136 next_entry_id,
4137 &mut cx,
4138 )
4139 .await;
4140 project.update(&mut cx, |project, _| {
4141 project.loading_local_worktrees.remove(&path);
4142 });
4143 let worktree = worktree?;
4144
4145 let project_id = project.update(&mut cx, |project, cx| {
4146 project.add_worktree(&worktree, cx);
4147 project.shared_remote_id()
4148 });
4149
4150 if let Some(project_id) = project_id {
4151 worktree
4152 .update(&mut cx, |worktree, cx| {
4153 worktree.as_local_mut().unwrap().share(project_id, cx)
4154 })
4155 .await
4156 .log_err();
4157 }
4158
4159 Ok(worktree)
4160 }
                    .map_err(Arc::new)
4162 })
4163 .shared()
4164 })
4165 .clone();
4166 cx.foreground().spawn(async move {
4167 match task.await {
4168 Ok(worktree) => Ok(worktree),
4169 Err(err) => Err(anyhow!("{}", err)),
4170 }
4171 })
4172 }
4173
4174 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4175 self.worktrees.retain(|worktree| {
4176 if let Some(worktree) = worktree.upgrade(cx) {
4177 let id = worktree.read(cx).id();
4178 if id == id_to_remove {
4179 cx.emit(Event::WorktreeRemoved(id));
4180 false
4181 } else {
4182 true
4183 }
4184 } else {
4185 false
4186 }
4187 });
4188 self.metadata_changed(true, cx);
4189 cx.notify();
4190 }
4191
4192 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4193 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4194 if worktree.read(cx).is_local() {
4195 cx.subscribe(&worktree, |this, worktree, _, cx| {
4196 this.update_local_worktree_buffers(worktree, cx);
4197 })
4198 .detach();
4199 }
4200
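        // Keep a strong handle if the project is shared or the worktree is visible or
        // remote; otherwise hold it weakly so it can be released when unused.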
4201 let push_strong_handle = {
4202 let worktree = worktree.read(cx);
4203 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4204 };
4205 if push_strong_handle {
4206 self.worktrees
4207 .push(WorktreeHandle::Strong(worktree.clone()));
4208 } else {
4209 self.worktrees
4210 .push(WorktreeHandle::Weak(worktree.downgrade()));
4211 }
4212
4213 self.metadata_changed(true, cx);
4214 cx.observe_release(&worktree, |this, worktree, cx| {
4215 this.remove_worktree(worktree.id(), cx);
4216 cx.notify();
4217 })
4218 .detach();
4219
4220 cx.emit(Event::WorktreeAdded);
4221 cx.notify();
4222 }
4223
4224 fn update_local_worktree_buffers(
4225 &mut self,
4226 worktree_handle: ModelHandle<Worktree>,
4227 cx: &mut ModelContext<Self>,
4228 ) {
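        // Re-resolve each open buffer's file against the updated snapshot (by entry id,
        // then by path) and remember any buffers whose absolute path changed so their
        // language servers can be re-registered.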
4229 let snapshot = worktree_handle.read(cx).snapshot();
4230 let mut buffers_to_delete = Vec::new();
4231 let mut renamed_buffers = Vec::new();
4232 for (buffer_id, buffer) in &self.opened_buffers {
4233 if let Some(buffer) = buffer.upgrade(cx) {
4234 buffer.update(cx, |buffer, cx| {
4235 if let Some(old_file) = File::from_dyn(buffer.file()) {
4236 if old_file.worktree != worktree_handle {
4237 return;
4238 }
4239
4240 let new_file = if let Some(entry) = old_file
4241 .entry_id
4242 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4243 {
4244 File {
4245 is_local: true,
4246 entry_id: Some(entry.id),
4247 mtime: entry.mtime,
4248 path: entry.path.clone(),
4249 worktree: worktree_handle.clone(),
4250 }
4251 } else if let Some(entry) =
4252 snapshot.entry_for_path(old_file.path().as_ref())
4253 {
4254 File {
4255 is_local: true,
4256 entry_id: Some(entry.id),
4257 mtime: entry.mtime,
4258 path: entry.path.clone(),
4259 worktree: worktree_handle.clone(),
4260 }
4261 } else {
4262 File {
4263 is_local: true,
4264 entry_id: None,
4265 path: old_file.path().clone(),
4266 mtime: old_file.mtime(),
4267 worktree: worktree_handle.clone(),
4268 }
4269 };
4270
4271 let old_path = old_file.abs_path(cx);
4272 if new_file.abs_path(cx) != old_path {
4273 renamed_buffers.push((cx.handle(), old_path));
4274 }
4275
4276 if let Some(project_id) = self.shared_remote_id() {
4277 self.client
4278 .send(proto::UpdateBufferFile {
4279 project_id,
4280 buffer_id: *buffer_id as u64,
4281 file: Some(new_file.to_proto()),
4282 })
4283 .log_err();
4284 }
4285 buffer.file_updated(Arc::new(new_file), cx).detach();
4286 }
4287 });
4288 } else {
4289 buffers_to_delete.push(*buffer_id);
4290 }
4291 }
4292
4293 for buffer_id in buffers_to_delete {
4294 self.opened_buffers.remove(&buffer_id);
4295 }
4296
4297 for (buffer, old_path) in renamed_buffers {
4298 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4299 self.assign_language_to_buffer(&buffer, cx);
4300 self.register_buffer_with_language_server(&buffer, cx);
4301 }
4302 }
4303
4304 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4305 let new_active_entry = entry.and_then(|project_path| {
4306 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4307 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4308 Some(entry.id)
4309 });
4310 if new_active_entry != self.active_entry {
4311 self.active_entry = new_active_entry;
4312 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4313 }
4314 }
4315
4316 pub fn language_servers_running_disk_based_diagnostics<'a>(
4317 &'a self,
4318 ) -> impl 'a + Iterator<Item = usize> {
4319 self.language_server_statuses
4320 .iter()
4321 .filter_map(|(id, status)| {
4322 if status.has_pending_diagnostic_updates {
4323 Some(*id)
4324 } else {
4325 None
4326 }
4327 })
4328 }
4329
4330 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4331 let mut summary = DiagnosticSummary::default();
4332 for (_, path_summary) in self.diagnostic_summaries(cx) {
4333 summary.error_count += path_summary.error_count;
4334 summary.warning_count += path_summary.warning_count;
4335 }
4336 summary
4337 }
4338
4339 pub fn diagnostic_summaries<'a>(
4340 &'a self,
4341 cx: &'a AppContext,
4342 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4343 self.visible_worktrees(cx).flat_map(move |worktree| {
4344 let worktree = worktree.read(cx);
4345 let worktree_id = worktree.id();
4346 worktree
4347 .diagnostic_summaries()
4348 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4349 })
4350 }
4351
4352 pub fn disk_based_diagnostics_started(
4353 &mut self,
4354 language_server_id: usize,
4355 cx: &mut ModelContext<Self>,
4356 ) {
4357 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4358 }
4359
4360 pub fn disk_based_diagnostics_finished(
4361 &mut self,
4362 language_server_id: usize,
4363 cx: &mut ModelContext<Self>,
4364 ) {
4365 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4366 }
4367
4368 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4369 self.active_entry
4370 }
4371
4372 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4373 self.worktree_for_id(path.worktree_id, cx)?
4374 .read(cx)
4375 .entry_for_path(&path.path)
4376 .map(|entry| entry.id)
4377 }
4378
4379 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4380 let worktree = self.worktree_for_entry(entry_id, cx)?;
4381 let worktree = worktree.read(cx);
4382 let worktree_id = worktree.id();
4383 let path = worktree.entry_for_id(entry_id)?.path.clone();
4384 Some(ProjectPath { worktree_id, path })
4385 }
4386
4387 // RPC message handlers
4388
4389 async fn handle_request_join_project(
4390 this: ModelHandle<Self>,
4391 message: TypedEnvelope<proto::RequestJoinProject>,
4392 _: Arc<Client>,
4393 mut cx: AsyncAppContext,
4394 ) -> Result<()> {
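        // Requests from existing collaborators are accepted automatically; anyone else is
        // surfaced to the UI via an event.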
4395 let user_id = message.payload.requester_id;
4396 if this.read_with(&cx, |project, _| {
4397 project.collaborators.values().any(|c| c.user.id == user_id)
4398 }) {
4399 this.update(&mut cx, |this, cx| {
4400 this.respond_to_join_request(user_id, true, cx)
4401 });
4402 } else {
4403 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4404 let user = user_store
4405 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4406 .await?;
4407 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4408 }
4409 Ok(())
4410 }
4411
4412 async fn handle_unregister_project(
4413 this: ModelHandle<Self>,
4414 _: TypedEnvelope<proto::UnregisterProject>,
4415 _: Arc<Client>,
4416 mut cx: AsyncAppContext,
4417 ) -> Result<()> {
4418 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4419 Ok(())
4420 }
4421
4422 async fn handle_project_unshared(
4423 this: ModelHandle<Self>,
4424 _: TypedEnvelope<proto::ProjectUnshared>,
4425 _: Arc<Client>,
4426 mut cx: AsyncAppContext,
4427 ) -> Result<()> {
4428 this.update(&mut cx, |this, cx| this.unshared(cx));
4429 Ok(())
4430 }
4431
4432 async fn handle_add_collaborator(
4433 this: ModelHandle<Self>,
4434 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4435 _: Arc<Client>,
4436 mut cx: AsyncAppContext,
4437 ) -> Result<()> {
4438 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4439 let collaborator = envelope
4440 .payload
4441 .collaborator
4442 .take()
4443 .ok_or_else(|| anyhow!("empty collaborator"))?;
4444
4445 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4446 this.update(&mut cx, |this, cx| {
4447 this.collaborators
4448 .insert(collaborator.peer_id, collaborator);
4449 cx.notify();
4450 });
4451
4452 Ok(())
4453 }
4454
4455 async fn handle_remove_collaborator(
4456 this: ModelHandle<Self>,
4457 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4458 _: Arc<Client>,
4459 mut cx: AsyncAppContext,
4460 ) -> Result<()> {
4461 this.update(&mut cx, |this, cx| {
4462 let peer_id = PeerId(envelope.payload.peer_id);
4463 let replica_id = this
4464 .collaborators
4465 .remove(&peer_id)
4466 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4467 .replica_id;
            for buffer in this.opened_buffers.values() {
4469 if let Some(buffer) = buffer.upgrade(cx) {
4470 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4471 }
4472 }
4473
4474 cx.emit(Event::CollaboratorLeft(peer_id));
4475 cx.notify();
4476 Ok(())
4477 })
4478 }
4479
4480 async fn handle_join_project_request_cancelled(
4481 this: ModelHandle<Self>,
4482 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4483 _: Arc<Client>,
4484 mut cx: AsyncAppContext,
4485 ) -> Result<()> {
4486 let user = this
4487 .update(&mut cx, |this, cx| {
4488 this.user_store.update(cx, |user_store, cx| {
4489 user_store.fetch_user(envelope.payload.requester_id, cx)
4490 })
4491 })
4492 .await?;
4493
4494 this.update(&mut cx, |_, cx| {
4495 cx.emit(Event::ContactCancelledJoinRequest(user));
4496 });
4497
4498 Ok(())
4499 }
4500
4501 async fn handle_update_project(
4502 this: ModelHandle<Self>,
4503 envelope: TypedEnvelope<proto::UpdateProject>,
4504 client: Arc<Client>,
4505 mut cx: AsyncAppContext,
4506 ) -> Result<()> {
4507 this.update(&mut cx, |this, cx| {
4508 let replica_id = this.replica_id();
4509 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4510
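            // Keep worktrees that are already loaded, create remote worktrees for new
            // entries, and emit removal events for worktrees absent from the update.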
4511 let mut old_worktrees_by_id = this
4512 .worktrees
4513 .drain(..)
4514 .filter_map(|worktree| {
4515 let worktree = worktree.upgrade(cx)?;
4516 Some((worktree.read(cx).id(), worktree))
4517 })
4518 .collect::<HashMap<_, _>>();
4519
4520 for worktree in envelope.payload.worktrees {
4521 if let Some(old_worktree) =
4522 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4523 {
4524 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4525 } else {
4526 let worktree = proto::Worktree {
4527 id: worktree.id,
4528 root_name: worktree.root_name,
4529 entries: Default::default(),
4530 diagnostic_summaries: Default::default(),
4531 visible: worktree.visible,
4532 scan_id: 0,
4533 };
4534 let (worktree, load_task) =
4535 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4536 this.add_worktree(&worktree, cx);
4537 load_task.detach();
4538 }
4539 }
4540
4541 this.metadata_changed(true, cx);
4542 for (id, _) in old_worktrees_by_id {
4543 cx.emit(Event::WorktreeRemoved(id));
4544 }
4545
4546 Ok(())
4547 })
4548 }
4549
4550 async fn handle_update_worktree(
4551 this: ModelHandle<Self>,
4552 envelope: TypedEnvelope<proto::UpdateWorktree>,
4553 _: Arc<Client>,
4554 mut cx: AsyncAppContext,
4555 ) -> Result<()> {
4556 this.update(&mut cx, |this, cx| {
4557 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4558 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4559 worktree.update(cx, |worktree, _| {
4560 let worktree = worktree.as_remote_mut().unwrap();
4561 worktree.update_from_remote(envelope)
4562 })?;
4563 }
4564 Ok(())
4565 })
4566 }
4567
4568 async fn handle_create_project_entry(
4569 this: ModelHandle<Self>,
4570 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4571 _: Arc<Client>,
4572 mut cx: AsyncAppContext,
4573 ) -> Result<proto::ProjectEntryResponse> {
4574 let worktree = this.update(&mut cx, |this, cx| {
4575 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4576 this.worktree_for_id(worktree_id, cx)
4577 .ok_or_else(|| anyhow!("worktree not found"))
4578 })?;
4579 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4580 let entry = worktree
4581 .update(&mut cx, |worktree, cx| {
4582 let worktree = worktree.as_local_mut().unwrap();
4583 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4584 worktree.create_entry(path, envelope.payload.is_directory, cx)
4585 })
4586 .await?;
4587 Ok(proto::ProjectEntryResponse {
4588 entry: Some((&entry).into()),
4589 worktree_scan_id: worktree_scan_id as u64,
4590 })
4591 }
4592
4593 async fn handle_rename_project_entry(
4594 this: ModelHandle<Self>,
4595 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4596 _: Arc<Client>,
4597 mut cx: AsyncAppContext,
4598 ) -> Result<proto::ProjectEntryResponse> {
4599 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4600 let worktree = this.read_with(&cx, |this, cx| {
4601 this.worktree_for_entry(entry_id, cx)
4602 .ok_or_else(|| anyhow!("worktree not found"))
4603 })?;
4604 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4605 let entry = worktree
4606 .update(&mut cx, |worktree, cx| {
4607 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4608 worktree
4609 .as_local_mut()
4610 .unwrap()
4611 .rename_entry(entry_id, new_path, cx)
4612 .ok_or_else(|| anyhow!("invalid entry"))
4613 })?
4614 .await?;
4615 Ok(proto::ProjectEntryResponse {
4616 entry: Some((&entry).into()),
4617 worktree_scan_id: worktree_scan_id as u64,
4618 })
4619 }
4620
4621 async fn handle_copy_project_entry(
4622 this: ModelHandle<Self>,
4623 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4624 _: Arc<Client>,
4625 mut cx: AsyncAppContext,
4626 ) -> Result<proto::ProjectEntryResponse> {
4627 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4628 let worktree = this.read_with(&cx, |this, cx| {
4629 this.worktree_for_entry(entry_id, cx)
4630 .ok_or_else(|| anyhow!("worktree not found"))
4631 })?;
4632 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4633 let entry = worktree
4634 .update(&mut cx, |worktree, cx| {
4635 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4636 worktree
4637 .as_local_mut()
4638 .unwrap()
4639 .copy_entry(entry_id, new_path, cx)
4640 .ok_or_else(|| anyhow!("invalid entry"))
4641 })?
4642 .await?;
4643 Ok(proto::ProjectEntryResponse {
4644 entry: Some((&entry).into()),
4645 worktree_scan_id: worktree_scan_id as u64,
4646 })
4647 }
4648
4649 async fn handle_delete_project_entry(
4650 this: ModelHandle<Self>,
4651 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4652 _: Arc<Client>,
4653 mut cx: AsyncAppContext,
4654 ) -> Result<proto::ProjectEntryResponse> {
4655 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4656 let worktree = this.read_with(&cx, |this, cx| {
4657 this.worktree_for_entry(entry_id, cx)
4658 .ok_or_else(|| anyhow!("worktree not found"))
4659 })?;
4660 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4661 worktree
4662 .update(&mut cx, |worktree, cx| {
4663 worktree
4664 .as_local_mut()
4665 .unwrap()
4666 .delete_entry(entry_id, cx)
4667 .ok_or_else(|| anyhow!("invalid entry"))
4668 })?
4669 .await?;
4670 Ok(proto::ProjectEntryResponse {
4671 entry: None,
4672 worktree_scan_id: worktree_scan_id as u64,
4673 })
4674 }
4675
4676 async fn handle_update_diagnostic_summary(
4677 this: ModelHandle<Self>,
4678 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4679 _: Arc<Client>,
4680 mut cx: AsyncAppContext,
4681 ) -> Result<()> {
4682 this.update(&mut cx, |this, cx| {
4683 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4684 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4685 if let Some(summary) = envelope.payload.summary {
4686 let project_path = ProjectPath {
4687 worktree_id,
4688 path: Path::new(&summary.path).into(),
4689 };
4690 worktree.update(cx, |worktree, _| {
4691 worktree
4692 .as_remote_mut()
4693 .unwrap()
4694 .update_diagnostic_summary(project_path.path.clone(), &summary);
4695 });
4696 cx.emit(Event::DiagnosticsUpdated {
4697 language_server_id: summary.language_server_id as usize,
4698 path: project_path,
4699 });
4700 }
4701 }
4702 Ok(())
4703 })
4704 }
4705
4706 async fn handle_start_language_server(
4707 this: ModelHandle<Self>,
4708 envelope: TypedEnvelope<proto::StartLanguageServer>,
4709 _: Arc<Client>,
4710 mut cx: AsyncAppContext,
4711 ) -> Result<()> {
4712 let server = envelope
4713 .payload
4714 .server
4715 .ok_or_else(|| anyhow!("invalid server"))?;
4716 this.update(&mut cx, |this, cx| {
4717 this.language_server_statuses.insert(
4718 server.id as usize,
4719 LanguageServerStatus {
4720 name: server.name,
4721 pending_work: Default::default(),
4722 has_pending_diagnostic_updates: false,
4723 progress_tokens: Default::default(),
4724 },
4725 );
4726 cx.notify();
4727 });
4728 Ok(())
4729 }
4730
4731 async fn handle_update_language_server(
4732 this: ModelHandle<Self>,
4733 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4734 _: Arc<Client>,
4735 mut cx: AsyncAppContext,
4736 ) -> Result<()> {
4737 let language_server_id = envelope.payload.language_server_id as usize;
4738 match envelope
4739 .payload
4740 .variant
4741 .ok_or_else(|| anyhow!("invalid variant"))?
4742 {
4743 proto::update_language_server::Variant::WorkStart(payload) => {
4744 this.update(&mut cx, |this, cx| {
4745 this.on_lsp_work_start(
4746 language_server_id,
4747 payload.token,
4748 LanguageServerProgress {
4749 message: payload.message,
4750 percentage: payload.percentage.map(|p| p as usize),
4751 last_update_at: Instant::now(),
4752 },
4753 cx,
4754 );
4755 })
4756 }
4757 proto::update_language_server::Variant::WorkProgress(payload) => {
4758 this.update(&mut cx, |this, cx| {
4759 this.on_lsp_work_progress(
4760 language_server_id,
4761 payload.token,
4762 LanguageServerProgress {
4763 message: payload.message,
4764 percentage: payload.percentage.map(|p| p as usize),
4765 last_update_at: Instant::now(),
4766 },
4767 cx,
4768 );
4769 })
4770 }
4771 proto::update_language_server::Variant::WorkEnd(payload) => {
4772 this.update(&mut cx, |this, cx| {
4773 this.on_lsp_work_end(language_server_id, payload.token, cx);
4774 })
4775 }
4776 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4777 this.update(&mut cx, |this, cx| {
4778 this.disk_based_diagnostics_started(language_server_id, cx);
4779 })
4780 }
4781 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4782 this.update(&mut cx, |this, cx| {
4783 this.disk_based_diagnostics_finished(language_server_id, cx)
4784 });
4785 }
4786 }
4787
4788 Ok(())
4789 }
4790
4791 async fn handle_update_buffer(
4792 this: ModelHandle<Self>,
4793 envelope: TypedEnvelope<proto::UpdateBuffer>,
4794 _: Arc<Client>,
4795 mut cx: AsyncAppContext,
4796 ) -> Result<()> {
4797 this.update(&mut cx, |this, cx| {
4798 let payload = envelope.payload.clone();
4799 let buffer_id = payload.buffer_id;
4800 let ops = payload
4801 .operations
4802 .into_iter()
                .map(language::proto::deserialize_operation)
4804 .collect::<Result<Vec<_>, _>>()?;
4805 let is_remote = this.is_remote();
4806 match this.opened_buffers.entry(buffer_id) {
4807 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4808 OpenBuffer::Strong(buffer) => {
4809 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4810 }
4811 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4812 OpenBuffer::Weak(_) => {}
4813 },
4814 hash_map::Entry::Vacant(e) => {
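                    // The buffer isn't open on this guest yet; stash the operations so they
                    // can be applied once it finishes loading.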
4815 assert!(
4816 is_remote,
4817 "received buffer update from {:?}",
4818 envelope.original_sender_id
4819 );
4820 e.insert(OpenBuffer::Loading(ops));
4821 }
4822 }
4823 Ok(())
4824 })
4825 }
4826
4827 async fn handle_update_buffer_file(
4828 this: ModelHandle<Self>,
4829 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4830 _: Arc<Client>,
4831 mut cx: AsyncAppContext,
4832 ) -> Result<()> {
4833 this.update(&mut cx, |this, cx| {
4834 let payload = envelope.payload.clone();
4835 let buffer_id = payload.buffer_id;
4836 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4837 let worktree = this
4838 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4839 .ok_or_else(|| anyhow!("no such worktree"))?;
4840 let file = File::from_proto(file, worktree.clone(), cx)?;
4841 let buffer = this
4842 .opened_buffers
4843 .get_mut(&buffer_id)
4844 .and_then(|b| b.upgrade(cx))
4845 .ok_or_else(|| anyhow!("no such buffer"))?;
4846 buffer.update(cx, |buffer, cx| {
4847 buffer.file_updated(Arc::new(file), cx).detach();
4848 });
4849 Ok(())
4850 })
4851 }
4852
4853 async fn handle_save_buffer(
4854 this: ModelHandle<Self>,
4855 envelope: TypedEnvelope<proto::SaveBuffer>,
4856 _: Arc<Client>,
4857 mut cx: AsyncAppContext,
4858 ) -> Result<proto::BufferSaved> {
4859 let buffer_id = envelope.payload.buffer_id;
4860 let requested_version = deserialize_version(envelope.payload.version);
4861
4862 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4863 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4864 let buffer = this
4865 .opened_buffers
4866 .get(&buffer_id)
4867 .and_then(|buffer| buffer.upgrade(cx))
4868 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4869 Ok::<_, anyhow::Error>((project_id, buffer))
4870 })?;
4871 buffer
4872 .update(&mut cx, |buffer, _| {
4873 buffer.wait_for_version(requested_version)
4874 })
4875 .await;
4876
4877 let (saved_version, fingerprint, mtime) =
4878 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4879 Ok(proto::BufferSaved {
4880 project_id,
4881 buffer_id,
4882 version: serialize_version(&saved_version),
4883 mtime: Some(mtime.into()),
4884 fingerprint,
4885 })
4886 }
4887
4888 async fn handle_reload_buffers(
4889 this: ModelHandle<Self>,
4890 envelope: TypedEnvelope<proto::ReloadBuffers>,
4891 _: Arc<Client>,
4892 mut cx: AsyncAppContext,
4893 ) -> Result<proto::ReloadBuffersResponse> {
4894 let sender_id = envelope.original_sender_id()?;
4895 let reload = this.update(&mut cx, |this, cx| {
4896 let mut buffers = HashSet::default();
4897 for buffer_id in &envelope.payload.buffer_ids {
4898 buffers.insert(
4899 this.opened_buffers
4900 .get(buffer_id)
4901 .and_then(|buffer| buffer.upgrade(cx))
4902 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4903 );
4904 }
4905 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4906 })?;
4907
4908 let project_transaction = reload.await?;
4909 let project_transaction = this.update(&mut cx, |this, cx| {
4910 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4911 });
4912 Ok(proto::ReloadBuffersResponse {
4913 transaction: Some(project_transaction),
4914 })
4915 }
4916
4917 async fn handle_format_buffers(
4918 this: ModelHandle<Self>,
4919 envelope: TypedEnvelope<proto::FormatBuffers>,
4920 _: Arc<Client>,
4921 mut cx: AsyncAppContext,
4922 ) -> Result<proto::FormatBuffersResponse> {
4923 let sender_id = envelope.original_sender_id()?;
4924 let format = this.update(&mut cx, |this, cx| {
4925 let mut buffers = HashSet::default();
4926 for buffer_id in &envelope.payload.buffer_ids {
4927 buffers.insert(
4928 this.opened_buffers
4929 .get(buffer_id)
4930 .and_then(|buffer| buffer.upgrade(cx))
4931 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4932 );
4933 }
4934 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4935 })?;
4936
4937 let project_transaction = format.await?;
4938 let project_transaction = this.update(&mut cx, |this, cx| {
4939 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4940 });
4941 Ok(proto::FormatBuffersResponse {
4942 transaction: Some(project_transaction),
4943 })
4944 }
4945
4946 async fn handle_get_completions(
4947 this: ModelHandle<Self>,
4948 envelope: TypedEnvelope<proto::GetCompletions>,
4949 _: Arc<Client>,
4950 mut cx: AsyncAppContext,
4951 ) -> Result<proto::GetCompletionsResponse> {
4952 let position = envelope
4953 .payload
4954 .position
4955 .and_then(language::proto::deserialize_anchor)
4956 .ok_or_else(|| anyhow!("invalid position"))?;
4957 let version = deserialize_version(envelope.payload.version);
4958 let buffer = this.read_with(&cx, |this, cx| {
4959 this.opened_buffers
4960 .get(&envelope.payload.buffer_id)
4961 .and_then(|buffer| buffer.upgrade(cx))
4962 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4963 })?;
4964 buffer
4965 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4966 .await;
4967 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4968 let completions = this
4969 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4970 .await?;
4971
4972 Ok(proto::GetCompletionsResponse {
4973 completions: completions
4974 .iter()
4975 .map(language::proto::serialize_completion)
4976 .collect(),
4977 version: serialize_version(&version),
4978 })
4979 }
4980
4981 async fn handle_apply_additional_edits_for_completion(
4982 this: ModelHandle<Self>,
4983 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4984 _: Arc<Client>,
4985 mut cx: AsyncAppContext,
4986 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4987 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4988 let buffer = this
4989 .opened_buffers
4990 .get(&envelope.payload.buffer_id)
4991 .and_then(|buffer| buffer.upgrade(cx))
4992 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4993 let language = buffer.read(cx).language();
4994 let completion = language::proto::deserialize_completion(
4995 envelope
4996 .payload
4997 .completion
4998 .ok_or_else(|| anyhow!("invalid completion"))?,
4999 language,
5000 )?;
5001 Ok::<_, anyhow::Error>(
5002 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
5003 )
5004 })?;
5005
5006 Ok(proto::ApplyCompletionAdditionalEditsResponse {
5007 transaction: apply_additional_edits
5008 .await?
5009 .as_ref()
5010 .map(language::proto::serialize_transaction),
5011 })
5012 }
5013
5014 async fn handle_get_code_actions(
5015 this: ModelHandle<Self>,
5016 envelope: TypedEnvelope<proto::GetCodeActions>,
5017 _: Arc<Client>,
5018 mut cx: AsyncAppContext,
5019 ) -> Result<proto::GetCodeActionsResponse> {
5020 let start = envelope
5021 .payload
5022 .start
5023 .and_then(language::proto::deserialize_anchor)
5024 .ok_or_else(|| anyhow!("invalid start"))?;
5025 let end = envelope
5026 .payload
5027 .end
5028 .and_then(language::proto::deserialize_anchor)
5029 .ok_or_else(|| anyhow!("invalid end"))?;
5030 let buffer = this.update(&mut cx, |this, cx| {
5031 this.opened_buffers
5032 .get(&envelope.payload.buffer_id)
5033 .and_then(|buffer| buffer.upgrade(cx))
5034 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5035 })?;
5036 buffer
5037 .update(&mut cx, |buffer, _| {
5038 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5039 })
5040 .await;
5041
5042 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5043 let code_actions = this.update(&mut cx, |this, cx| {
5044 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5045 })?;
5046
5047 Ok(proto::GetCodeActionsResponse {
5048 actions: code_actions
5049 .await?
5050 .iter()
5051 .map(language::proto::serialize_code_action)
5052 .collect(),
5053 version: serialize_version(&version),
5054 })
5055 }
5056
5057 async fn handle_apply_code_action(
5058 this: ModelHandle<Self>,
5059 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5060 _: Arc<Client>,
5061 mut cx: AsyncAppContext,
5062 ) -> Result<proto::ApplyCodeActionResponse> {
5063 let sender_id = envelope.original_sender_id()?;
5064 let action = language::proto::deserialize_code_action(
5065 envelope
5066 .payload
5067 .action
5068 .ok_or_else(|| anyhow!("invalid action"))?,
5069 )?;
5070 let apply_code_action = this.update(&mut cx, |this, cx| {
5071 let buffer = this
5072 .opened_buffers
5073 .get(&envelope.payload.buffer_id)
5074 .and_then(|buffer| buffer.upgrade(cx))
5075 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5076 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5077 })?;
5078
5079 let project_transaction = apply_code_action.await?;
5080 let project_transaction = this.update(&mut cx, |this, cx| {
5081 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5082 });
5083 Ok(proto::ApplyCodeActionResponse {
5084 transaction: Some(project_transaction),
5085 })
5086 }
5087
5088 async fn handle_lsp_command<T: LspCommand>(
5089 this: ModelHandle<Self>,
5090 envelope: TypedEnvelope<T::ProtoRequest>,
5091 _: Arc<Client>,
5092 mut cx: AsyncAppContext,
5093 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5094 where
5095 <T::LspRequest as lsp::request::Request>::Result: Send,
5096 {
5097 let sender_id = envelope.original_sender_id()?;
5098 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5099 let buffer_handle = this.read_with(&cx, |this, _| {
5100 this.opened_buffers
5101 .get(&buffer_id)
5102 .and_then(|buffer| buffer.upgrade(&cx))
5103 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5104 })?;
5105 let request = T::from_proto(
5106 envelope.payload,
5107 this.clone(),
5108 buffer_handle.clone(),
5109 cx.clone(),
5110 )
5111 .await?;
5112 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5113 let response = this
5114 .update(&mut cx, |this, cx| {
5115 this.request_lsp(buffer_handle, request, cx)
5116 })
5117 .await?;
5118 this.update(&mut cx, |this, cx| {
5119 Ok(T::response_to_proto(
5120 response,
5121 this,
5122 sender_id,
5123 &buffer_version,
5124 cx,
5125 ))
5126 })
5127 }
5128
5129 async fn handle_get_project_symbols(
5130 this: ModelHandle<Self>,
5131 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5132 _: Arc<Client>,
5133 mut cx: AsyncAppContext,
5134 ) -> Result<proto::GetProjectSymbolsResponse> {
5135 let symbols = this
5136 .update(&mut cx, |this, cx| {
5137 this.symbols(&envelope.payload.query, cx)
5138 })
5139 .await?;
5140
5141 Ok(proto::GetProjectSymbolsResponse {
5142 symbols: symbols.iter().map(serialize_symbol).collect(),
5143 })
5144 }
5145
5146 async fn handle_search_project(
5147 this: ModelHandle<Self>,
5148 envelope: TypedEnvelope<proto::SearchProject>,
5149 _: Arc<Client>,
5150 mut cx: AsyncAppContext,
5151 ) -> Result<proto::SearchProjectResponse> {
5152 let peer_id = envelope.original_sender_id()?;
5153 let query = SearchQuery::from_proto(envelope.payload)?;
5154 let result = this
5155 .update(&mut cx, |this, cx| this.search(query, cx))
5156 .await?;
5157
5158 this.update(&mut cx, |this, cx| {
5159 let mut locations = Vec::new();
5160 for (buffer, ranges) in result {
5161 for range in ranges {
5162 let start = serialize_anchor(&range.start);
5163 let end = serialize_anchor(&range.end);
5164 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5165 locations.push(proto::Location {
5166 buffer: Some(buffer),
5167 start: Some(start),
5168 end: Some(end),
5169 });
5170 }
5171 }
5172 Ok(proto::SearchProjectResponse { locations })
5173 })
5174 }
5175
5176 async fn handle_open_buffer_for_symbol(
5177 this: ModelHandle<Self>,
5178 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5179 _: Arc<Client>,
5180 mut cx: AsyncAppContext,
5181 ) -> Result<proto::OpenBufferForSymbolResponse> {
5182 let peer_id = envelope.original_sender_id()?;
5183 let symbol = envelope
5184 .payload
5185 .symbol
5186 .ok_or_else(|| anyhow!("invalid symbol"))?;
5187 let symbol = this.read_with(&cx, |this, _| {
5188 let symbol = this.deserialize_symbol(symbol)?;
5189 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5190 if signature == symbol.signature {
5191 Ok(symbol)
5192 } else {
5193 Err(anyhow!("invalid symbol signature"))
5194 }
5195 })?;
5196 let buffer = this
5197 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5198 .await?;
5199
5200 Ok(proto::OpenBufferForSymbolResponse {
5201 buffer: Some(this.update(&mut cx, |this, cx| {
5202 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5203 })),
5204 })
5205 }
5206
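    // Symbols handed to peers are signed with the project's nonce;
    // handle_open_buffer_for_symbol rejects symbols whose signature doesn't match.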
5207 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5208 let mut hasher = Sha256::new();
5209 hasher.update(worktree_id.to_proto().to_be_bytes());
5210 hasher.update(path.to_string_lossy().as_bytes());
5211 hasher.update(self.nonce.to_be_bytes());
5212 hasher.finalize().as_slice().try_into().unwrap()
5213 }
5214
5215 async fn handle_open_buffer_by_id(
5216 this: ModelHandle<Self>,
5217 envelope: TypedEnvelope<proto::OpenBufferById>,
5218 _: Arc<Client>,
5219 mut cx: AsyncAppContext,
5220 ) -> Result<proto::OpenBufferResponse> {
5221 let peer_id = envelope.original_sender_id()?;
5222 let buffer = this
5223 .update(&mut cx, |this, cx| {
5224 this.open_buffer_by_id(envelope.payload.id, cx)
5225 })
5226 .await?;
5227 this.update(&mut cx, |this, cx| {
5228 Ok(proto::OpenBufferResponse {
5229 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5230 })
5231 })
5232 }
5233
5234 async fn handle_open_buffer_by_path(
5235 this: ModelHandle<Self>,
5236 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5237 _: Arc<Client>,
5238 mut cx: AsyncAppContext,
5239 ) -> Result<proto::OpenBufferResponse> {
5240 let peer_id = envelope.original_sender_id()?;
5241 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5242 let open_buffer = this.update(&mut cx, |this, cx| {
5243 this.open_buffer(
5244 ProjectPath {
5245 worktree_id,
5246 path: PathBuf::from(envelope.payload.path).into(),
5247 },
5248 cx,
5249 )
5250 });
5251
5252 let buffer = open_buffer.await?;
5253 this.update(&mut cx, |this, cx| {
5254 Ok(proto::OpenBufferResponse {
5255 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5256 })
5257 })
5258 }
5259
5260 fn serialize_project_transaction_for_peer(
5261 &mut self,
5262 project_transaction: ProjectTransaction,
5263 peer_id: PeerId,
5264 cx: &AppContext,
5265 ) -> proto::ProjectTransaction {
5266 let mut serialized_transaction = proto::ProjectTransaction {
5267 buffers: Default::default(),
5268 transactions: Default::default(),
5269 };
5270 for (buffer, transaction) in project_transaction.0 {
5271 serialized_transaction
5272 .buffers
5273 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5274 serialized_transaction
5275 .transactions
5276 .push(language::proto::serialize_transaction(&transaction));
5277 }
5278 serialized_transaction
5279 }
5280
5281 fn deserialize_project_transaction(
5282 &mut self,
5283 message: proto::ProjectTransaction,
5284 push_to_history: bool,
5285 cx: &mut ModelContext<Self>,
5286 ) -> Task<Result<ProjectTransaction>> {
5287 cx.spawn(|this, mut cx| async move {
5288 let mut project_transaction = ProjectTransaction::default();
5289 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5290 let buffer = this
5291 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5292 .await?;
5293 let transaction = language::proto::deserialize_transaction(transaction)?;
5294 project_transaction.0.insert(buffer, transaction);
5295 }
5296
5297 for (buffer, transaction) in &project_transaction.0 {
5298 buffer
5299 .update(&mut cx, |buffer, _| {
5300 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5301 })
5302 .await;
5303
5304 if push_to_history {
5305 buffer.update(&mut cx, |buffer, _| {
5306 buffer.push_transaction(transaction.clone(), Instant::now());
5307 });
5308 }
5309 }
5310
5311 Ok(project_transaction)
5312 })
5313 }
5314
5315 fn serialize_buffer_for_peer(
5316 &mut self,
5317 buffer: &ModelHandle<Buffer>,
5318 peer_id: PeerId,
5319 cx: &AppContext,
5320 ) -> proto::Buffer {
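        // Send the full buffer state the first time a buffer is shared with a given peer;
        // afterwards only its id is sent.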
5321 let buffer_id = buffer.read(cx).remote_id();
5322 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5323 if shared_buffers.insert(buffer_id) {
5324 proto::Buffer {
5325 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5326 }
5327 } else {
5328 proto::Buffer {
5329 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5330 }
5331 }
5332 }
5333
5334 fn deserialize_buffer(
5335 &mut self,
5336 buffer: proto::Buffer,
5337 cx: &mut ModelContext<Self>,
5338 ) -> Task<Result<ModelHandle<Buffer>>> {
5339 let replica_id = self.replica_id();
5340
5341 let opened_buffer_tx = self.opened_buffer.0.clone();
5342 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5343 cx.spawn(|this, mut cx| async move {
5344 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5345 proto::buffer::Variant::Id(id) => {
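                    // Only an id was sent, so poll opened_buffers, waiting on the
                    // opened_buffer channel until the buffer appears.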
5346 let buffer = loop {
5347 let buffer = this.read_with(&cx, |this, cx| {
5348 this.opened_buffers
5349 .get(&id)
5350 .and_then(|buffer| buffer.upgrade(cx))
5351 });
5352 if let Some(buffer) = buffer {
5353 break buffer;
5354 }
5355 opened_buffer_rx
5356 .next()
5357 .await
5358 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5359 };
5360 Ok(buffer)
5361 }
5362 proto::buffer::Variant::State(mut buffer) => {
5363 let mut buffer_worktree = None;
5364 let mut buffer_file = None;
5365 if let Some(file) = buffer.file.take() {
5366 this.read_with(&cx, |this, cx| {
5367 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5368 let worktree =
5369 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5370 anyhow!("no worktree found for id {}", file.worktree_id)
5371 })?;
5372 buffer_file =
5373 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5374 as Arc<dyn language::File>);
5375 buffer_worktree = Some(worktree);
5376 Ok::<_, anyhow::Error>(())
5377 })?;
5378 }
5379
5380 let buffer = cx.add_model(|cx| {
5381 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5382 });
5383
5384 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5385
5386 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5387 Ok(buffer)
5388 }
5389 }
5390 })
5391 }
5392
5393 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5394 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5395 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5396 let start = serialized_symbol
5397 .start
5398 .ok_or_else(|| anyhow!("invalid start"))?;
5399 let end = serialized_symbol
5400 .end
5401 .ok_or_else(|| anyhow!("invalid end"))?;
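        // Safety: assumes the serialized kind matches a valid discriminant of the symbol
        // kind type; an out-of-range value here would be undefined behavior.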
5402 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5403 let path = PathBuf::from(serialized_symbol.path);
5404 let language = self.languages.select_language(&path);
5405 Ok(Symbol {
5406 source_worktree_id,
5407 worktree_id,
5408 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5409 label: language
5410 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5411 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5412 name: serialized_symbol.name,
5413 path,
5414 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5415 kind,
5416 signature: serialized_symbol
5417 .signature
5418 .try_into()
5419 .map_err(|_| anyhow!("invalid signature"))?,
5420 })
5421 }
5422
5423 async fn handle_buffer_saved(
5424 this: ModelHandle<Self>,
5425 envelope: TypedEnvelope<proto::BufferSaved>,
5426 _: Arc<Client>,
5427 mut cx: AsyncAppContext,
5428 ) -> Result<()> {
5429 let version = deserialize_version(envelope.payload.version);
5430 let mtime = envelope
5431 .payload
5432 .mtime
5433 .ok_or_else(|| anyhow!("missing mtime"))?
5434 .into();
5435
5436 this.update(&mut cx, |this, cx| {
5437 let buffer = this
5438 .opened_buffers
5439 .get(&envelope.payload.buffer_id)
5440 .and_then(|buffer| buffer.upgrade(cx));
5441 if let Some(buffer) = buffer {
5442 buffer.update(cx, |buffer, cx| {
5443 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5444 });
5445 }
5446 Ok(())
5447 })
5448 }
5449
5450 async fn handle_buffer_reloaded(
5451 this: ModelHandle<Self>,
5452 envelope: TypedEnvelope<proto::BufferReloaded>,
5453 _: Arc<Client>,
5454 mut cx: AsyncAppContext,
5455 ) -> Result<()> {
5456 let payload = envelope.payload.clone();
5457 let version = deserialize_version(payload.version);
5458 let mtime = payload
5459 .mtime
5460 .ok_or_else(|| anyhow!("missing mtime"))?
5461 .into();
5462 this.update(&mut cx, |this, cx| {
5463 let buffer = this
5464 .opened_buffers
5465 .get(&payload.buffer_id)
5466 .and_then(|buffer| buffer.upgrade(cx));
5467 if let Some(buffer) = buffer {
5468 buffer.update(cx, |buffer, cx| {
5469 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5470 });
5471 }
5472 Ok(())
5473 })
5474 }
5475
5476 pub fn match_paths<'a>(
5477 &self,
5478 query: &'a str,
5479 include_ignored: bool,
5480 smart_case: bool,
5481 max_results: usize,
5482 cancel_flag: &'a AtomicBool,
5483 cx: &AppContext,
5484 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5485 let worktrees = self
5486 .worktrees(cx)
5487 .filter(|worktree| worktree.read(cx).is_visible())
5488 .collect::<Vec<_>>();
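        // Only prefix candidates with their worktree's root name when more than one
        // worktree is visible.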
5489 let include_root_name = worktrees.len() > 1;
5490 let candidate_sets = worktrees
5491 .into_iter()
5492 .map(|worktree| CandidateSet {
5493 snapshot: worktree.read(cx).snapshot(),
5494 include_ignored,
5495 include_root_name,
5496 })
5497 .collect::<Vec<_>>();
5498
5499 let background = cx.background().clone();
5500 async move {
5501 fuzzy::match_paths(
5502 candidate_sets.as_slice(),
5503 query,
5504 smart_case,
5505 max_results,
5506 cancel_flag,
5507 background,
5508 )
5509 .await
5510 }
5511 }
5512
5513 fn edits_from_lsp(
5514 &mut self,
5515 buffer: &ModelHandle<Buffer>,
5516 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5517 version: Option<i32>,
5518 cx: &mut ModelContext<Self>,
5519 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5520 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5521 cx.background().spawn(async move {
5522 let snapshot = snapshot?;
5523 let mut lsp_edits = lsp_edits
5524 .into_iter()
5525 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5526 .collect::<Vec<_>>();
5527 lsp_edits.sort_by_key(|(range, _)| range.start);
5528
5529 let mut lsp_edits = lsp_edits.into_iter().peekable();
5530 let mut edits = Vec::new();
5531 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5532 // Combine any LSP edits that are adjacent.
5533 //
5534 // Also, combine LSP edits that are separated from each other by only
5535 // a newline. This is important because for some code actions,
5536 // Rust-analyzer rewrites the entire buffer via a series of edits that
5537 // are separated by unchanged newline characters.
5538 //
5539 // In order for the diffing logic below to work properly, any edits that
5540 // cancel each other out must be combined into one.
5541 while let Some((next_range, next_text)) = lsp_edits.peek() {
5542 if next_range.start > range.end {
5543 if next_range.start.row > range.end.row + 1
5544 || next_range.start.column > 0
5545 || snapshot.clip_point_utf16(
5546 PointUtf16::new(range.end.row, u32::MAX),
5547 Bias::Left,
5548 ) > range.end
5549 {
5550 break;
5551 }
5552 new_text.push('\n');
5553 }
5554 range.end = next_range.end;
5555 new_text.push_str(&next_text);
5556 lsp_edits.next();
5557 }
5558
5559 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5560 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5561 {
5562 return Err(anyhow!("invalid edits received from language server"));
5563 }
5564
5565 // For multiline edits, perform a diff of the old and new text so that
5566 // we can identify the changes more precisely, preserving the locations
5567 // of any anchors positioned in the unchanged regions.
5568 if range.end.row > range.start.row {
5569 let mut offset = range.start.to_offset(&snapshot);
5570 let old_text = snapshot.text_for_range(range).collect::<String>();
5571
5572 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5573 let mut moved_since_edit = true;
5574 for change in diff.iter_all_changes() {
5575 let tag = change.tag();
5576 let value = change.value();
5577 match tag {
5578 ChangeTag::Equal => {
5579 offset += value.len();
5580 moved_since_edit = true;
5581 }
5582 ChangeTag::Delete => {
5583 let start = snapshot.anchor_after(offset);
5584 let end = snapshot.anchor_before(offset + value.len());
5585 if moved_since_edit {
5586 edits.push((start..end, String::new()));
5587 } else {
5588 edits.last_mut().unwrap().0.end = end;
5589 }
5590 offset += value.len();
5591 moved_since_edit = false;
5592 }
5593 ChangeTag::Insert => {
5594 if moved_since_edit {
5595 let anchor = snapshot.anchor_after(offset);
5596 edits.push((anchor.clone()..anchor, value.to_string()));
5597 } else {
5598 edits.last_mut().unwrap().1.push_str(value);
5599 }
5600 moved_since_edit = false;
5601 }
5602 }
5603 }
5604 } else if range.end == range.start {
5605 let anchor = snapshot.anchor_after(range.start);
5606 edits.push((anchor.clone()..anchor, new_text));
5607 } else {
5608 let edit_start = snapshot.anchor_after(range.start);
5609 let edit_end = snapshot.anchor_before(range.end);
5610 edits.push((edit_start..edit_end, new_text));
5611 }
5612 }
5613
5614 Ok(edits)
5615 })
5616 }
5617
5618 fn buffer_snapshot_for_lsp_version(
5619 &mut self,
5620 buffer: &ModelHandle<Buffer>,
5621 version: Option<i32>,
5622 cx: &AppContext,
5623 ) -> Result<TextBufferSnapshot> {
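        // Language servers can reference slightly stale document versions, so a bounded
        // window of recent snapshots is retained per buffer and searched for the requested
        // version.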
5624 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5625
5626 if let Some(version) = version {
5627 let buffer_id = buffer.read(cx).remote_id();
5628 let snapshots = self
5629 .buffer_snapshots
5630 .get_mut(&buffer_id)
5631 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5632 let mut found_snapshot = None;
5633 snapshots.retain(|(snapshot_version, snapshot)| {
5634 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5635 false
5636 } else {
5637 if *snapshot_version == version {
5638 found_snapshot = Some(snapshot.clone());
5639 }
5640 true
5641 }
5642 });
5643
5644 found_snapshot.ok_or_else(|| {
5645 anyhow!(
5646 "snapshot not found for buffer {} at version {}",
5647 buffer_id,
5648 version
5649 )
5650 })
5651 } else {
            Ok(buffer.read(cx).text_snapshot())
5653 }
5654 }
5655
5656 fn language_server_for_buffer(
5657 &self,
5658 buffer: &Buffer,
5659 cx: &AppContext,
5660 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5661 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5662 let worktree_id = file.worktree_id(cx);
5663 self.language_servers
5664 .get(&(worktree_id, language.lsp_adapter()?.name()))
5665 } else {
5666 None
5667 }
5668 }
5669}
5670
5671impl ProjectStore {
5672 pub fn new(db: Arc<Db>) -> Self {
5673 Self {
5674 db,
5675 projects: Default::default(),
5676 }
5677 }
5678
5679 pub fn projects<'a>(
5680 &'a self,
5681 cx: &'a AppContext,
5682 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5683 self.projects
5684 .iter()
5685 .filter_map(|project| project.upgrade(cx))
5686 }
5687
5688 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5689 if let Err(ix) = self
5690 .projects
5691 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5692 {
5693 self.projects.insert(ix, project);
5694 }
5695 cx.notify();
5696 }
5697
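    // Drops weak handles whose projects have been released, notifying observers
    // only if anything was actually removed.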
5698 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5699 let mut did_change = false;
5700 self.projects.retain(|project| {
5701 if project.is_upgradable(cx) {
5702 true
5703 } else {
5704 did_change = true;
5705 false
5706 }
5707 });
5708 if did_change {
5709 cx.notify();
5710 }
5711 }
5712}
5713
5714impl WorktreeHandle {
5715 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5716 match self {
5717 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5718 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5719 }
5720 }
5721}
5722
5723impl OpenBuffer {
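    // Returns a strong handle to the buffer if it is still open; buffers that are
    // still loading yield `None`.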
5724 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5725 match self {
5726 OpenBuffer::Strong(handle) => Some(handle.clone()),
5727 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5728 OpenBuffer::Loading(_) => None,
5729 }
5730 }
5731}
5732
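// A set of fuzzy-match candidates backed by a worktree snapshot. `include_ignored`
// controls whether ignored files are offered as candidates, and `include_root_name`
// prefixes candidate paths with the worktree's root name.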
5733struct CandidateSet {
5734 snapshot: Snapshot,
5735 include_ignored: bool,
5736 include_root_name: bool,
5737}
5738
5739impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5740 type Candidates = CandidateSetIter<'a>;
5741
5742 fn id(&self) -> usize {
5743 self.snapshot.id().to_usize()
5744 }
5745
5746 fn len(&self) -> usize {
5747 if self.include_ignored {
5748 self.snapshot.file_count()
5749 } else {
5750 self.snapshot.visible_file_count()
5751 }
5752 }
5753
5754 fn prefix(&self) -> Arc<str> {
5755 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5756 self.snapshot.root_name().into()
5757 } else if self.include_root_name {
5758 format!("{}/", self.snapshot.root_name()).into()
5759 } else {
5760 "".into()
5761 }
5762 }
5763
5764 fn candidates(&'a self, start: usize) -> Self::Candidates {
5765 CandidateSetIter {
5766 traversal: self.snapshot.files(self.include_ignored, start),
5767 }
5768 }
5769}
5770
5771struct CandidateSetIter<'a> {
5772 traversal: Traversal<'a>,
5773}
5774
5775impl<'a> Iterator for CandidateSetIter<'a> {
5776 type Item = PathMatchCandidate<'a>;
5777
5778 fn next(&mut self) -> Option<Self::Item> {
5779 self.traversal.next().map(|entry| {
5780 if let EntryKind::File(char_bag) = entry.kind {
5781 PathMatchCandidate {
5782 path: &entry.path,
5783 char_bag,
5784 }
5785 } else {
5786 unreachable!()
5787 }
5788 })
5789 }
5790}
5791
5792impl Entity for ProjectStore {
5793 type Event = ();
5794}
5795
5796impl Entity for Project {
5797 type Event = Event;
5798
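    // When the project model is dropped, prune it from the project store and let
    // the server know: unregister a local project that was registered, or leave a
    // remote project.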
5799 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5800 self.project_store.update(cx, ProjectStore::prune_projects);
5801
5802 match &self.client_state {
5803 ProjectClientState::Local { remote_id_rx, .. } => {
5804 if let Some(project_id) = *remote_id_rx.borrow() {
5805 self.client
5806 .send(proto::UnregisterProject { project_id })
5807 .log_err();
5808 }
5809 }
5810 ProjectClientState::Remote { remote_id, .. } => {
5811 self.client
5812 .send(proto::LeaveProject {
5813 project_id: *remote_id,
5814 })
5815 .log_err();
5816 }
5817 }
5818 }
5819
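    // Before the app quits, ask every running language server to shut down and
    // wait for all of the shutdowns to complete.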
5820 fn app_will_quit(
5821 &mut self,
5822 _: &mut MutableAppContext,
5823 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5824 let shutdown_futures = self
5825 .language_servers
5826 .drain()
5827 .filter_map(|(_, (_, server))| server.shutdown())
5828 .collect::<Vec<_>>();
5829 Some(
5830 async move {
5831 futures::future::join_all(shutdown_futures).await;
5832 }
5833 .boxed(),
5834 )
5835 }
5836}
5837
5838impl Collaborator {
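    // Builds a `Collaborator` from its protobuf representation, fetching the
    // referenced user from the `UserStore`.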
5839 fn from_proto(
5840 message: proto::Collaborator,
5841 user_store: &ModelHandle<UserStore>,
5842 cx: &mut AsyncAppContext,
5843 ) -> impl Future<Output = Result<Self>> {
5844 let user = user_store.update(cx, |user_store, cx| {
5845 user_store.fetch_user(message.user_id, cx)
5846 });
5847
5848 async move {
5849 Ok(Self {
5850 peer_id: PeerId(message.peer_id),
5851 user: user.await?,
5852 replica_id: message.replica_id as ReplicaId,
5853 })
5854 }
5855 }
5856}
5857
5858impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5859 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5860 Self {
5861 worktree_id,
5862 path: path.as_ref().into(),
5863 }
5864 }
5865}
5866
5867impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5868 fn from(options: lsp::CreateFileOptions) -> Self {
5869 Self {
5870 overwrite: options.overwrite.unwrap_or(false),
5871 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5872 }
5873 }
5874}
5875
5876impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5877 fn from(options: lsp::RenameFileOptions) -> Self {
5878 Self {
5879 overwrite: options.overwrite.unwrap_or(false),
5880 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5881 }
5882 }
5883}
5884
5885impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5886 fn from(options: lsp::DeleteFileOptions) -> Self {
5887 Self {
5888 recursive: options.recursive.unwrap_or(false),
5889 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5890 }
5891 }
5892}
5893
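// Converts a `Symbol` into its protobuf representation. Note that `kind` is
// transmuted directly, which relies on the LSP and protobuf symbol-kind enums
// sharing the same representation.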
5894fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5895 proto::Symbol {
5896 source_worktree_id: symbol.source_worktree_id.to_proto(),
5897 worktree_id: symbol.worktree_id.to_proto(),
5898 language_server_name: symbol.language_server_name.0.to_string(),
5899 name: symbol.name.clone(),
5900 kind: unsafe { mem::transmute(symbol.kind) },
5901 path: symbol.path.to_string_lossy().to_string(),
5902 start: Some(proto::Point {
5903 row: symbol.range.start.row,
5904 column: symbol.range.start.column,
5905 }),
5906 end: Some(proto::Point {
5907 row: symbol.range.end.row,
5908 column: symbol.range.end.column,
5909 }),
5910 signature: symbol.signature.to_vec(),
5911 }
5912}
5913
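// Returns `path` expressed relative to `base`, inserting `..` components where
// the two paths diverge. For example, relativizing "/a/c/d" against "/a/b"
// yields "../c/d".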
5914fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5915 let mut path_components = path.components();
5916 let mut base_components = base.components();
5917 let mut components: Vec<Component> = Vec::new();
5918 loop {
5919 match (path_components.next(), base_components.next()) {
5920 (None, None) => break,
5921 (Some(a), None) => {
5922 components.push(a);
5923 components.extend(path_components.by_ref());
5924 break;
5925 }
5926 (None, _) => components.push(Component::ParentDir),
5927 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5928 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5929 (Some(a), Some(_)) => {
5930 components.push(Component::ParentDir);
5931 for _ in base_components {
5932 components.push(Component::ParentDir);
5933 }
5934 components.push(a);
5935 components.extend(path_components.by_ref());
5936 break;
5937 }
5938 }
5939 }
5940 components.iter().map(|c| c.as_os_str()).collect()
5941}
5942
5943impl Item for Buffer {
5944 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5945 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5946 }
5947}
5948
5949#[cfg(test)]
5950mod tests {
5951 use crate::worktree::WorktreeHandle;
5952
5953 use super::{Event, *};
5954 use fs::RealFs;
5955 use futures::{future, StreamExt};
5956 use gpui::{executor::Deterministic, test::subscribe};
5957 use language::{
5958 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5959 OffsetRangeExt, Point, ToPoint,
5960 };
5961 use lsp::Url;
5962 use serde_json::json;
5963 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5964 use unindent::Unindent as _;
5965 use util::{assert_set_eq, test::temp_tree};
5966
5967 #[gpui::test]
5968 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5969 let dir = temp_tree(json!({
5970 "root": {
5971 "apple": "",
5972 "banana": {
5973 "carrot": {
5974 "date": "",
5975 "endive": "",
5976 }
5977 },
5978 "fennel": {
5979 "grape": "",
5980 }
5981 }
5982 }));
5983
5984 let root_link_path = dir.path().join("root_link");
5985 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5986 unix::fs::symlink(
5987 &dir.path().join("root/fennel"),
5988 &dir.path().join("root/finnochio"),
5989 )
5990 .unwrap();
5991
5992 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5993
5994 project.read_with(cx, |project, cx| {
5995 let tree = project.worktrees(cx).next().unwrap().read(cx);
5996 assert_eq!(tree.file_count(), 5);
5997 assert_eq!(
5998 tree.inode_for_path("fennel/grape"),
5999 tree.inode_for_path("finnochio/grape")
6000 );
6001 });
6002
6003 let cancel_flag = Default::default();
6004 let results = project
6005 .read_with(cx, |project, cx| {
6006 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
6007 })
6008 .await;
6009 assert_eq!(
6010 results
6011 .into_iter()
6012 .map(|result| result.path)
6013 .collect::<Vec<Arc<Path>>>(),
6014 vec![
6015 PathBuf::from("banana/carrot/date").into(),
6016 PathBuf::from("banana/carrot/endive").into(),
6017 ]
6018 );
6019 }
6020
6021 #[gpui::test]
6022 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6023 cx.foreground().forbid_parking();
6024
6025 let mut rust_language = Language::new(
6026 LanguageConfig {
6027 name: "Rust".into(),
6028 path_suffixes: vec!["rs".to_string()],
6029 ..Default::default()
6030 },
6031 Some(tree_sitter_rust::language()),
6032 );
6033 let mut json_language = Language::new(
6034 LanguageConfig {
6035 name: "JSON".into(),
6036 path_suffixes: vec!["json".to_string()],
6037 ..Default::default()
6038 },
6039 None,
6040 );
6041 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6042 name: "the-rust-language-server",
6043 capabilities: lsp::ServerCapabilities {
6044 completion_provider: Some(lsp::CompletionOptions {
6045 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6046 ..Default::default()
6047 }),
6048 ..Default::default()
6049 },
6050 ..Default::default()
6051 });
6052 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6053 name: "the-json-language-server",
6054 capabilities: lsp::ServerCapabilities {
6055 completion_provider: Some(lsp::CompletionOptions {
6056 trigger_characters: Some(vec![":".to_string()]),
6057 ..Default::default()
6058 }),
6059 ..Default::default()
6060 },
6061 ..Default::default()
6062 });
6063
6064 let fs = FakeFs::new(cx.background());
6065 fs.insert_tree(
6066 "/the-root",
6067 json!({
6068 "test.rs": "const A: i32 = 1;",
6069 "test2.rs": "",
6070 "Cargo.toml": "a = 1",
6071 "package.json": "{\"a\": 1}",
6072 }),
6073 )
6074 .await;
6075
6076 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6077 project.update(cx, |project, _| {
6078 project.languages.add(Arc::new(rust_language));
6079 project.languages.add(Arc::new(json_language));
6080 });
6081
6082 // Open a buffer without an associated language server.
6083 let toml_buffer = project
6084 .update(cx, |project, cx| {
6085 project.open_local_buffer("/the-root/Cargo.toml", cx)
6086 })
6087 .await
6088 .unwrap();
6089
6090 // Open a buffer with an associated language server.
6091 let rust_buffer = project
6092 .update(cx, |project, cx| {
6093 project.open_local_buffer("/the-root/test.rs", cx)
6094 })
6095 .await
6096 .unwrap();
6097
6098        // A Rust language server is started, and it is notified about the open Rust file.
6099 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6100 assert_eq!(
6101 fake_rust_server
6102 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6103 .await
6104 .text_document,
6105 lsp::TextDocumentItem {
6106 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6107 version: 0,
6108 text: "const A: i32 = 1;".to_string(),
6109 language_id: Default::default()
6110 }
6111 );
6112
6113 // The buffer is configured based on the language server's capabilities.
6114 rust_buffer.read_with(cx, |buffer, _| {
6115 assert_eq!(
6116 buffer.completion_triggers(),
6117 &[".".to_string(), "::".to_string()]
6118 );
6119 });
6120 toml_buffer.read_with(cx, |buffer, _| {
6121 assert!(buffer.completion_triggers().is_empty());
6122 });
6123
6124 // Edit a buffer. The changes are reported to the language server.
6125 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6126 assert_eq!(
6127 fake_rust_server
6128 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6129 .await
6130 .text_document,
6131 lsp::VersionedTextDocumentIdentifier::new(
6132 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6133 1
6134 )
6135 );
6136
6137 // Open a third buffer with a different associated language server.
6138 let json_buffer = project
6139 .update(cx, |project, cx| {
6140 project.open_local_buffer("/the-root/package.json", cx)
6141 })
6142 .await
6143 .unwrap();
6144
6145        // A JSON language server is started up, and it is notified only about the JSON buffer.
6146 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6147 assert_eq!(
6148 fake_json_server
6149 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6150 .await
6151 .text_document,
6152 lsp::TextDocumentItem {
6153 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6154 version: 0,
6155 text: "{\"a\": 1}".to_string(),
6156 language_id: Default::default()
6157 }
6158 );
6159
6160 // This buffer is configured based on the second language server's
6161 // capabilities.
6162 json_buffer.read_with(cx, |buffer, _| {
6163 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6164 });
6165
6166 // When opening another buffer whose language server is already running,
6167 // it is also configured based on the existing language server's capabilities.
6168 let rust_buffer2 = project
6169 .update(cx, |project, cx| {
6170 project.open_local_buffer("/the-root/test2.rs", cx)
6171 })
6172 .await
6173 .unwrap();
6174 rust_buffer2.read_with(cx, |buffer, _| {
6175 assert_eq!(
6176 buffer.completion_triggers(),
6177 &[".".to_string(), "::".to_string()]
6178 );
6179 });
6180
6181 // Changes are reported only to servers matching the buffer's language.
6182 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6183 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6184 assert_eq!(
6185 fake_rust_server
6186 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6187 .await
6188 .text_document,
6189 lsp::VersionedTextDocumentIdentifier::new(
6190 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6191 1
6192 )
6193 );
6194
6195 // Save notifications are reported to all servers.
6196 toml_buffer
6197 .update(cx, |buffer, cx| buffer.save(cx))
6198 .await
6199 .unwrap();
6200 assert_eq!(
6201 fake_rust_server
6202 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6203 .await
6204 .text_document,
6205 lsp::TextDocumentIdentifier::new(
6206 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6207 )
6208 );
6209 assert_eq!(
6210 fake_json_server
6211 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6212 .await
6213 .text_document,
6214 lsp::TextDocumentIdentifier::new(
6215 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6216 )
6217 );
6218
6219 // Renames are reported only to servers matching the buffer's language.
6220 fs.rename(
6221 Path::new("/the-root/test2.rs"),
6222 Path::new("/the-root/test3.rs"),
6223 Default::default(),
6224 )
6225 .await
6226 .unwrap();
6227 assert_eq!(
6228 fake_rust_server
6229 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6230 .await
6231 .text_document,
6232 lsp::TextDocumentIdentifier::new(
6233 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6234 ),
6235 );
6236 assert_eq!(
6237 fake_rust_server
6238 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6239 .await
6240 .text_document,
6241 lsp::TextDocumentItem {
6242 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6243 version: 0,
6244 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6245 language_id: Default::default()
6246 },
6247 );
6248
6249 rust_buffer2.update(cx, |buffer, cx| {
6250 buffer.update_diagnostics(
6251 DiagnosticSet::from_sorted_entries(
6252 vec![DiagnosticEntry {
6253 diagnostic: Default::default(),
6254 range: Anchor::MIN..Anchor::MAX,
6255 }],
6256 &buffer.snapshot(),
6257 ),
6258 cx,
6259 );
6260 assert_eq!(
6261 buffer
6262 .snapshot()
6263 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6264 .count(),
6265 1
6266 );
6267 });
6268
6269 // When the rename changes the extension of the file, the buffer gets closed on the old
6270 // language server and gets opened on the new one.
6271 fs.rename(
6272 Path::new("/the-root/test3.rs"),
6273 Path::new("/the-root/test3.json"),
6274 Default::default(),
6275 )
6276 .await
6277 .unwrap();
6278 assert_eq!(
6279 fake_rust_server
6280 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6281 .await
6282 .text_document,
6283 lsp::TextDocumentIdentifier::new(
6284 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6285 ),
6286 );
6287 assert_eq!(
6288 fake_json_server
6289 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6290 .await
6291 .text_document,
6292 lsp::TextDocumentItem {
6293 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6294 version: 0,
6295 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6296 language_id: Default::default()
6297 },
6298 );
6299
6300 // We clear the diagnostics, since the language has changed.
6301 rust_buffer2.read_with(cx, |buffer, _| {
6302 assert_eq!(
6303 buffer
6304 .snapshot()
6305 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6306 .count(),
6307 0
6308 );
6309 });
6310
6311        // The renamed file's version resets after changing language servers.
6312 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6313 assert_eq!(
6314 fake_json_server
6315 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6316 .await
6317 .text_document,
6318 lsp::VersionedTextDocumentIdentifier::new(
6319 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6320 1
6321 )
6322 );
6323
6324 // Restart language servers
6325 project.update(cx, |project, cx| {
6326 project.restart_language_servers_for_buffers(
6327 vec![rust_buffer.clone(), json_buffer.clone()],
6328 cx,
6329 );
6330 });
6331
6332 let mut rust_shutdown_requests = fake_rust_server
6333 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6334 let mut json_shutdown_requests = fake_json_server
6335 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6336 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6337
6338 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6339 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6340
6341        // Ensure the Rust document is reopened in the new Rust language server.
6342 assert_eq!(
6343 fake_rust_server
6344 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6345 .await
6346 .text_document,
6347 lsp::TextDocumentItem {
6348 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6349 version: 1,
6350 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6351 language_id: Default::default()
6352 }
6353 );
6354
6355        // Ensure the JSON documents are reopened in the new JSON language server.
6356 assert_set_eq!(
6357 [
6358 fake_json_server
6359 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6360 .await
6361 .text_document,
6362 fake_json_server
6363 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6364 .await
6365 .text_document,
6366 ],
6367 [
6368 lsp::TextDocumentItem {
6369 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6370 version: 0,
6371 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6372 language_id: Default::default()
6373 },
6374 lsp::TextDocumentItem {
6375 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6376 version: 1,
6377 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6378 language_id: Default::default()
6379 }
6380 ]
6381 );
6382
6383 // Close notifications are reported only to servers matching the buffer's language.
6384 cx.update(|_| drop(json_buffer));
6385 let close_message = lsp::DidCloseTextDocumentParams {
6386 text_document: lsp::TextDocumentIdentifier::new(
6387 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6388 ),
6389 };
6390 assert_eq!(
6391 fake_json_server
6392 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6393 .await,
6394 close_message,
6395 );
6396 }
6397
6398 #[gpui::test]
6399 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6400 cx.foreground().forbid_parking();
6401
6402 let fs = FakeFs::new(cx.background());
6403 fs.insert_tree(
6404 "/dir",
6405 json!({
6406 "a.rs": "let a = 1;",
6407 "b.rs": "let b = 2;"
6408 }),
6409 )
6410 .await;
6411
6412 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6413
6414 let buffer_a = project
6415 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6416 .await
6417 .unwrap();
6418 let buffer_b = project
6419 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6420 .await
6421 .unwrap();
6422
6423 project.update(cx, |project, cx| {
6424 project
6425 .update_diagnostics(
6426 0,
6427 lsp::PublishDiagnosticsParams {
6428 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6429 version: None,
6430 diagnostics: vec![lsp::Diagnostic {
6431 range: lsp::Range::new(
6432 lsp::Position::new(0, 4),
6433 lsp::Position::new(0, 5),
6434 ),
6435 severity: Some(lsp::DiagnosticSeverity::ERROR),
6436 message: "error 1".to_string(),
6437 ..Default::default()
6438 }],
6439 },
6440 &[],
6441 cx,
6442 )
6443 .unwrap();
6444 project
6445 .update_diagnostics(
6446 0,
6447 lsp::PublishDiagnosticsParams {
6448 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6449 version: None,
6450 diagnostics: vec![lsp::Diagnostic {
6451 range: lsp::Range::new(
6452 lsp::Position::new(0, 4),
6453 lsp::Position::new(0, 5),
6454 ),
6455 severity: Some(lsp::DiagnosticSeverity::WARNING),
6456 message: "error 2".to_string(),
6457 ..Default::default()
6458 }],
6459 },
6460 &[],
6461 cx,
6462 )
6463 .unwrap();
6464 });
6465
6466 buffer_a.read_with(cx, |buffer, _| {
6467 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6468 assert_eq!(
6469 chunks
6470 .iter()
6471 .map(|(s, d)| (s.as_str(), *d))
6472 .collect::<Vec<_>>(),
6473 &[
6474 ("let ", None),
6475 ("a", Some(DiagnosticSeverity::ERROR)),
6476 (" = 1;", None),
6477 ]
6478 );
6479 });
6480 buffer_b.read_with(cx, |buffer, _| {
6481 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6482 assert_eq!(
6483 chunks
6484 .iter()
6485 .map(|(s, d)| (s.as_str(), *d))
6486 .collect::<Vec<_>>(),
6487 &[
6488 ("let ", None),
6489 ("b", Some(DiagnosticSeverity::WARNING)),
6490 (" = 2;", None),
6491 ]
6492 );
6493 });
6494 }
6495
6496 #[gpui::test]
6497 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6498 cx.foreground().forbid_parking();
6499
6500 let fs = FakeFs::new(cx.background());
6501 fs.insert_tree(
6502 "/root",
6503 json!({
6504 "dir": {
6505 "a.rs": "let a = 1;",
6506 },
6507 "other.rs": "let b = c;"
6508 }),
6509 )
6510 .await;
6511
6512 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6513
6514 let (worktree, _) = project
6515 .update(cx, |project, cx| {
6516 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6517 })
6518 .await
6519 .unwrap();
6520 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6521
6522 project.update(cx, |project, cx| {
6523 project
6524 .update_diagnostics(
6525 0,
6526 lsp::PublishDiagnosticsParams {
6527 uri: Url::from_file_path("/root/other.rs").unwrap(),
6528 version: None,
6529 diagnostics: vec![lsp::Diagnostic {
6530 range: lsp::Range::new(
6531 lsp::Position::new(0, 8),
6532 lsp::Position::new(0, 9),
6533 ),
6534 severity: Some(lsp::DiagnosticSeverity::ERROR),
6535 message: "unknown variable 'c'".to_string(),
6536 ..Default::default()
6537 }],
6538 },
6539 &[],
6540 cx,
6541 )
6542 .unwrap();
6543 });
6544
6545 let buffer = project
6546 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6547 .await
6548 .unwrap();
6549 buffer.read_with(cx, |buffer, _| {
6550 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6551 assert_eq!(
6552 chunks
6553 .iter()
6554 .map(|(s, d)| (s.as_str(), *d))
6555 .collect::<Vec<_>>(),
6556 &[
6557 ("let b = ", None),
6558 ("c", Some(DiagnosticSeverity::ERROR)),
6559 (";", None),
6560 ]
6561 );
6562 });
6563
6564 project.read_with(cx, |project, cx| {
6565 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6566 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6567 });
6568 }
6569
6570 #[gpui::test]
6571 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6572 cx.foreground().forbid_parking();
6573
6574 let progress_token = "the-progress-token";
6575 let mut language = Language::new(
6576 LanguageConfig {
6577 name: "Rust".into(),
6578 path_suffixes: vec!["rs".to_string()],
6579 ..Default::default()
6580 },
6581 Some(tree_sitter_rust::language()),
6582 );
6583 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6584 disk_based_diagnostics_progress_token: Some(progress_token),
6585 disk_based_diagnostics_sources: &["disk"],
6586 ..Default::default()
6587 });
6588
6589 let fs = FakeFs::new(cx.background());
6590 fs.insert_tree(
6591 "/dir",
6592 json!({
6593 "a.rs": "fn a() { A }",
6594 "b.rs": "const y: i32 = 1",
6595 }),
6596 )
6597 .await;
6598
6599 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6600 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6601 let worktree_id =
6602 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6603
6604        // Cause the worktree to start the fake language server.
6605 let _buffer = project
6606 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6607 .await
6608 .unwrap();
6609
6610 let mut events = subscribe(&project, cx);
6611
6612 let fake_server = fake_servers.next().await.unwrap();
6613 fake_server.start_progress(progress_token).await;
6614 assert_eq!(
6615 events.next().await.unwrap(),
6616 Event::DiskBasedDiagnosticsStarted {
6617 language_server_id: 0,
6618 }
6619 );
6620
6621 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6622 lsp::PublishDiagnosticsParams {
6623 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6624 version: None,
6625 diagnostics: vec![lsp::Diagnostic {
6626 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6627 severity: Some(lsp::DiagnosticSeverity::ERROR),
6628 message: "undefined variable 'A'".to_string(),
6629 ..Default::default()
6630 }],
6631 },
6632 );
6633 assert_eq!(
6634 events.next().await.unwrap(),
6635 Event::DiagnosticsUpdated {
6636 language_server_id: 0,
6637 path: (worktree_id, Path::new("a.rs")).into()
6638 }
6639 );
6640
6641 fake_server.end_progress(progress_token);
6642 assert_eq!(
6643 events.next().await.unwrap(),
6644 Event::DiskBasedDiagnosticsFinished {
6645 language_server_id: 0
6646 }
6647 );
6648
6649 let buffer = project
6650 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6651 .await
6652 .unwrap();
6653
6654 buffer.read_with(cx, |buffer, _| {
6655 let snapshot = buffer.snapshot();
6656 let diagnostics = snapshot
6657 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6658 .collect::<Vec<_>>();
6659 assert_eq!(
6660 diagnostics,
6661 &[DiagnosticEntry {
6662 range: Point::new(0, 9)..Point::new(0, 10),
6663 diagnostic: Diagnostic {
6664 severity: lsp::DiagnosticSeverity::ERROR,
6665 message: "undefined variable 'A'".to_string(),
6666 group_id: 0,
6667 is_primary: true,
6668 ..Default::default()
6669 }
6670 }]
6671 )
6672 });
6673
6674 // Ensure publishing empty diagnostics twice only results in one update event.
6675 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6676 lsp::PublishDiagnosticsParams {
6677 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6678 version: None,
6679 diagnostics: Default::default(),
6680 },
6681 );
6682 assert_eq!(
6683 events.next().await.unwrap(),
6684 Event::DiagnosticsUpdated {
6685 language_server_id: 0,
6686 path: (worktree_id, Path::new("a.rs")).into()
6687 }
6688 );
6689
6690 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6691 lsp::PublishDiagnosticsParams {
6692 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6693 version: None,
6694 diagnostics: Default::default(),
6695 },
6696 );
6697 cx.foreground().run_until_parked();
6698 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6699 }
6700
6701 #[gpui::test]
6702 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6703 cx.foreground().forbid_parking();
6704
6705 let progress_token = "the-progress-token";
6706 let mut language = Language::new(
6707 LanguageConfig {
6708 path_suffixes: vec!["rs".to_string()],
6709 ..Default::default()
6710 },
6711 None,
6712 );
6713 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6714 disk_based_diagnostics_sources: &["disk"],
6715 disk_based_diagnostics_progress_token: Some(progress_token),
6716 ..Default::default()
6717 });
6718
6719 let fs = FakeFs::new(cx.background());
6720 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6721
6722 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6723 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6724
6725 let buffer = project
6726 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6727 .await
6728 .unwrap();
6729
6730 // Simulate diagnostics starting to update.
6731 let fake_server = fake_servers.next().await.unwrap();
6732 fake_server.start_progress(progress_token).await;
6733
6734 // Restart the server before the diagnostics finish updating.
6735 project.update(cx, |project, cx| {
6736 project.restart_language_servers_for_buffers([buffer], cx);
6737 });
6738 let mut events = subscribe(&project, cx);
6739
6740 // Simulate the newly started server sending more diagnostics.
6741 let fake_server = fake_servers.next().await.unwrap();
6742 fake_server.start_progress(progress_token).await;
6743 assert_eq!(
6744 events.next().await.unwrap(),
6745 Event::DiskBasedDiagnosticsStarted {
6746 language_server_id: 1
6747 }
6748 );
6749 project.read_with(cx, |project, _| {
6750 assert_eq!(
6751 project
6752 .language_servers_running_disk_based_diagnostics()
6753 .collect::<Vec<_>>(),
6754 [1]
6755 );
6756 });
6757
6758 // All diagnostics are considered done, despite the old server's diagnostic
6759 // task never completing.
6760 fake_server.end_progress(progress_token);
6761 assert_eq!(
6762 events.next().await.unwrap(),
6763 Event::DiskBasedDiagnosticsFinished {
6764 language_server_id: 1
6765 }
6766 );
6767 project.read_with(cx, |project, _| {
6768 assert_eq!(
6769 project
6770 .language_servers_running_disk_based_diagnostics()
6771 .collect::<Vec<_>>(),
6772 [0; 0]
6773 );
6774 });
6775 }
6776
6777 #[gpui::test]
6778 async fn test_toggling_enable_language_server(
6779 deterministic: Arc<Deterministic>,
6780 cx: &mut gpui::TestAppContext,
6781 ) {
6782 deterministic.forbid_parking();
6783
6784 let mut rust = Language::new(
6785 LanguageConfig {
6786 name: Arc::from("Rust"),
6787 path_suffixes: vec!["rs".to_string()],
6788 ..Default::default()
6789 },
6790 None,
6791 );
6792 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6793 name: "rust-lsp",
6794 ..Default::default()
6795 });
6796 let mut js = Language::new(
6797 LanguageConfig {
6798 name: Arc::from("JavaScript"),
6799 path_suffixes: vec!["js".to_string()],
6800 ..Default::default()
6801 },
6802 None,
6803 );
6804 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6805 name: "js-lsp",
6806 ..Default::default()
6807 });
6808
6809 let fs = FakeFs::new(cx.background());
6810 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6811 .await;
6812
6813 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6814 project.update(cx, |project, _| {
6815 project.languages.add(Arc::new(rust));
6816 project.languages.add(Arc::new(js));
6817 });
6818
6819 let _rs_buffer = project
6820 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6821 .await
6822 .unwrap();
6823 let _js_buffer = project
6824 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6825 .await
6826 .unwrap();
6827
6828 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6829 assert_eq!(
6830 fake_rust_server_1
6831 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6832 .await
6833 .text_document
6834 .uri
6835 .as_str(),
6836 "file:///dir/a.rs"
6837 );
6838
6839 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6840 assert_eq!(
6841 fake_js_server
6842 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6843 .await
6844 .text_document
6845 .uri
6846 .as_str(),
6847 "file:///dir/b.js"
6848 );
6849
6850        // Disable the Rust language server, ensuring that only that server gets stopped.
6851 cx.update(|cx| {
6852 cx.update_global(|settings: &mut Settings, _| {
6853 settings.language_overrides.insert(
6854 Arc::from("Rust"),
6855 settings::LanguageSettings {
6856 enable_language_server: Some(false),
6857 ..Default::default()
6858 },
6859 );
6860 })
6861 });
6862 fake_rust_server_1
6863 .receive_notification::<lsp::notification::Exit>()
6864 .await;
6865
6866 // Enable Rust and disable JavaScript language servers, ensuring that the
6867 // former gets started again and that the latter stops.
6868 cx.update(|cx| {
6869 cx.update_global(|settings: &mut Settings, _| {
6870 settings.language_overrides.insert(
6871 Arc::from("Rust"),
6872 settings::LanguageSettings {
6873 enable_language_server: Some(true),
6874 ..Default::default()
6875 },
6876 );
6877 settings.language_overrides.insert(
6878 Arc::from("JavaScript"),
6879 settings::LanguageSettings {
6880 enable_language_server: Some(false),
6881 ..Default::default()
6882 },
6883 );
6884 })
6885 });
6886 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6887 assert_eq!(
6888 fake_rust_server_2
6889 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6890 .await
6891 .text_document
6892 .uri
6893 .as_str(),
6894 "file:///dir/a.rs"
6895 );
6896 fake_js_server
6897 .receive_notification::<lsp::notification::Exit>()
6898 .await;
6899 }
6900
6901 #[gpui::test]
6902 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6903 cx.foreground().forbid_parking();
6904
6905 let mut language = Language::new(
6906 LanguageConfig {
6907 name: "Rust".into(),
6908 path_suffixes: vec!["rs".to_string()],
6909 ..Default::default()
6910 },
6911 Some(tree_sitter_rust::language()),
6912 );
6913 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6914 disk_based_diagnostics_sources: &["disk"],
6915 ..Default::default()
6916 });
6917
6918 let text = "
6919 fn a() { A }
6920 fn b() { BB }
6921 fn c() { CCC }
6922 "
6923 .unindent();
6924
6925 let fs = FakeFs::new(cx.background());
6926 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6927
6928 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6929 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6930
6931 let buffer = project
6932 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6933 .await
6934 .unwrap();
6935
6936 let mut fake_server = fake_servers.next().await.unwrap();
6937 let open_notification = fake_server
6938 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6939 .await;
6940
6941 // Edit the buffer, moving the content down
6942 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6943 let change_notification_1 = fake_server
6944 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6945 .await;
6946 assert!(
6947 change_notification_1.text_document.version > open_notification.text_document.version
6948 );
6949
6950 // Report some diagnostics for the initial version of the buffer
6951 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6952 lsp::PublishDiagnosticsParams {
6953 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6954 version: Some(open_notification.text_document.version),
6955 diagnostics: vec![
6956 lsp::Diagnostic {
6957 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6958 severity: Some(DiagnosticSeverity::ERROR),
6959 message: "undefined variable 'A'".to_string(),
6960 source: Some("disk".to_string()),
6961 ..Default::default()
6962 },
6963 lsp::Diagnostic {
6964 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6965 severity: Some(DiagnosticSeverity::ERROR),
6966 message: "undefined variable 'BB'".to_string(),
6967 source: Some("disk".to_string()),
6968 ..Default::default()
6969 },
6970 lsp::Diagnostic {
6971 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6972 severity: Some(DiagnosticSeverity::ERROR),
6973 source: Some("disk".to_string()),
6974 message: "undefined variable 'CCC'".to_string(),
6975 ..Default::default()
6976 },
6977 ],
6978 },
6979 );
6980
6981 // The diagnostics have moved down since they were created.
6982 buffer.next_notification(cx).await;
6983 buffer.read_with(cx, |buffer, _| {
6984 assert_eq!(
6985 buffer
6986 .snapshot()
6987 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6988 .collect::<Vec<_>>(),
6989 &[
6990 DiagnosticEntry {
6991 range: Point::new(3, 9)..Point::new(3, 11),
6992 diagnostic: Diagnostic {
6993 severity: DiagnosticSeverity::ERROR,
6994 message: "undefined variable 'BB'".to_string(),
6995 is_disk_based: true,
6996 group_id: 1,
6997 is_primary: true,
6998 ..Default::default()
6999 },
7000 },
7001 DiagnosticEntry {
7002 range: Point::new(4, 9)..Point::new(4, 12),
7003 diagnostic: Diagnostic {
7004 severity: DiagnosticSeverity::ERROR,
7005 message: "undefined variable 'CCC'".to_string(),
7006 is_disk_based: true,
7007 group_id: 2,
7008 is_primary: true,
7009 ..Default::default()
7010 }
7011 }
7012 ]
7013 );
7014 assert_eq!(
7015 chunks_with_diagnostics(buffer, 0..buffer.len()),
7016 [
7017 ("\n\nfn a() { ".to_string(), None),
7018 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7019 (" }\nfn b() { ".to_string(), None),
7020 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7021 (" }\nfn c() { ".to_string(), None),
7022 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7023 (" }\n".to_string(), None),
7024 ]
7025 );
7026 assert_eq!(
7027 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7028 [
7029 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7030 (" }\nfn c() { ".to_string(), None),
7031 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7032 ]
7033 );
7034 });
7035
7036 // Ensure overlapping diagnostics are highlighted correctly.
7037 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7038 lsp::PublishDiagnosticsParams {
7039 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7040 version: Some(open_notification.text_document.version),
7041 diagnostics: vec![
7042 lsp::Diagnostic {
7043 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7044 severity: Some(DiagnosticSeverity::ERROR),
7045 message: "undefined variable 'A'".to_string(),
7046 source: Some("disk".to_string()),
7047 ..Default::default()
7048 },
7049 lsp::Diagnostic {
7050 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7051 severity: Some(DiagnosticSeverity::WARNING),
7052 message: "unreachable statement".to_string(),
7053 source: Some("disk".to_string()),
7054 ..Default::default()
7055 },
7056 ],
7057 },
7058 );
7059
7060 buffer.next_notification(cx).await;
7061 buffer.read_with(cx, |buffer, _| {
7062 assert_eq!(
7063 buffer
7064 .snapshot()
7065 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7066 .collect::<Vec<_>>(),
7067 &[
7068 DiagnosticEntry {
7069 range: Point::new(2, 9)..Point::new(2, 12),
7070 diagnostic: Diagnostic {
7071 severity: DiagnosticSeverity::WARNING,
7072 message: "unreachable statement".to_string(),
7073 is_disk_based: true,
7074 group_id: 4,
7075 is_primary: true,
7076 ..Default::default()
7077 }
7078 },
7079 DiagnosticEntry {
7080 range: Point::new(2, 9)..Point::new(2, 10),
7081 diagnostic: Diagnostic {
7082 severity: DiagnosticSeverity::ERROR,
7083 message: "undefined variable 'A'".to_string(),
7084 is_disk_based: true,
7085 group_id: 3,
7086 is_primary: true,
7087 ..Default::default()
7088 },
7089 }
7090 ]
7091 );
7092 assert_eq!(
7093 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7094 [
7095 ("fn a() { ".to_string(), None),
7096 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7097 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7098 ("\n".to_string(), None),
7099 ]
7100 );
7101 assert_eq!(
7102 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7103 [
7104 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7105 ("\n".to_string(), None),
7106 ]
7107 );
7108 });
7109
7110 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7111 // changes since the last save.
7112 buffer.update(cx, |buffer, cx| {
7113 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7114 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7115 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7116 });
7117 let change_notification_2 = fake_server
7118 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7119 .await;
7120 assert!(
7121 change_notification_2.text_document.version
7122 > change_notification_1.text_document.version
7123 );
7124
7125 // Handle out-of-order diagnostics
7126 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7127 lsp::PublishDiagnosticsParams {
7128 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7129 version: Some(change_notification_2.text_document.version),
7130 diagnostics: vec![
7131 lsp::Diagnostic {
7132 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7133 severity: Some(DiagnosticSeverity::ERROR),
7134 message: "undefined variable 'BB'".to_string(),
7135 source: Some("disk".to_string()),
7136 ..Default::default()
7137 },
7138 lsp::Diagnostic {
7139 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7140 severity: Some(DiagnosticSeverity::WARNING),
7141 message: "undefined variable 'A'".to_string(),
7142 source: Some("disk".to_string()),
7143 ..Default::default()
7144 },
7145 ],
7146 },
7147 );
7148
7149 buffer.next_notification(cx).await;
7150 buffer.read_with(cx, |buffer, _| {
7151 assert_eq!(
7152 buffer
7153 .snapshot()
7154 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7155 .collect::<Vec<_>>(),
7156 &[
7157 DiagnosticEntry {
7158 range: Point::new(2, 21)..Point::new(2, 22),
7159 diagnostic: Diagnostic {
7160 severity: DiagnosticSeverity::WARNING,
7161 message: "undefined variable 'A'".to_string(),
7162 is_disk_based: true,
7163 group_id: 6,
7164 is_primary: true,
7165 ..Default::default()
7166 }
7167 },
7168 DiagnosticEntry {
7169 range: Point::new(3, 9)..Point::new(3, 14),
7170 diagnostic: Diagnostic {
7171 severity: DiagnosticSeverity::ERROR,
7172 message: "undefined variable 'BB'".to_string(),
7173 is_disk_based: true,
7174 group_id: 5,
7175 is_primary: true,
7176 ..Default::default()
7177 },
7178 }
7179 ]
7180 );
7181 });
7182 }
7183
7184 #[gpui::test]
7185 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7186 cx.foreground().forbid_parking();
7187
7188 let text = concat!(
7189 "let one = ;\n", //
7190 "let two = \n",
7191 "let three = 3;\n",
7192 );
7193
7194 let fs = FakeFs::new(cx.background());
7195 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7196
7197 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7198 let buffer = project
7199 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7200 .await
7201 .unwrap();
7202
7203 project.update(cx, |project, cx| {
7204 project
7205 .update_buffer_diagnostics(
7206 &buffer,
7207 vec![
7208 DiagnosticEntry {
7209 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7210 diagnostic: Diagnostic {
7211 severity: DiagnosticSeverity::ERROR,
7212 message: "syntax error 1".to_string(),
7213 ..Default::default()
7214 },
7215 },
7216 DiagnosticEntry {
7217 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7218 diagnostic: Diagnostic {
7219 severity: DiagnosticSeverity::ERROR,
7220 message: "syntax error 2".to_string(),
7221 ..Default::default()
7222 },
7223 },
7224 ],
7225 None,
7226 cx,
7227 )
7228 .unwrap();
7229 });
7230
7231 // An empty range is extended forward to include the following character.
7232 // At the end of a line, an empty range is extended backward to include
7233 // the preceding character.
7234 buffer.read_with(cx, |buffer, _| {
7235 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7236 assert_eq!(
7237 chunks
7238 .iter()
7239 .map(|(s, d)| (s.as_str(), *d))
7240 .collect::<Vec<_>>(),
7241 &[
7242 ("let one = ", None),
7243 (";", Some(DiagnosticSeverity::ERROR)),
7244 ("\nlet two =", None),
7245 (" ", Some(DiagnosticSeverity::ERROR)),
7246 ("\nlet three = 3;\n", None)
7247 ]
7248 );
7249 });
7250 }
7251
7252 #[gpui::test]
7253 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7254 cx.foreground().forbid_parking();
7255
7256 let mut language = Language::new(
7257 LanguageConfig {
7258 name: "Rust".into(),
7259 path_suffixes: vec!["rs".to_string()],
7260 ..Default::default()
7261 },
7262 Some(tree_sitter_rust::language()),
7263 );
7264 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7265
7266 let text = "
7267 fn a() {
7268 f1();
7269 }
7270 fn b() {
7271 f2();
7272 }
7273 fn c() {
7274 f3();
7275 }
7276 "
7277 .unindent();
7278
7279 let fs = FakeFs::new(cx.background());
7280 fs.insert_tree(
7281 "/dir",
7282 json!({
7283 "a.rs": text.clone(),
7284 }),
7285 )
7286 .await;
7287
7288 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7289 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7290 let buffer = project
7291 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7292 .await
7293 .unwrap();
7294
7295 let mut fake_server = fake_servers.next().await.unwrap();
7296 let lsp_document_version = fake_server
7297 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7298 .await
7299 .text_document
7300 .version;
7301
7302 // Simulate editing the buffer after the language server computes some edits.
7303 buffer.update(cx, |buffer, cx| {
7304 buffer.edit(
7305 [(
7306 Point::new(0, 0)..Point::new(0, 0),
7307 "// above first function\n",
7308 )],
7309 cx,
7310 );
7311 buffer.edit(
7312 [(
7313 Point::new(2, 0)..Point::new(2, 0),
7314 " // inside first function\n",
7315 )],
7316 cx,
7317 );
7318 buffer.edit(
7319 [(
7320 Point::new(6, 4)..Point::new(6, 4),
7321 "// inside second function ",
7322 )],
7323 cx,
7324 );
7325
7326 assert_eq!(
7327 buffer.text(),
7328 "
7329 // above first function
7330 fn a() {
7331 // inside first function
7332 f1();
7333 }
7334 fn b() {
7335 // inside second function f2();
7336 }
7337 fn c() {
7338 f3();
7339 }
7340 "
7341 .unindent()
7342 );
7343 });
7344
7345 let edits = project
7346 .update(cx, |project, cx| {
7347 project.edits_from_lsp(
7348 &buffer,
7349 vec![
7350 // replace body of first function
7351 lsp::TextEdit {
7352 range: lsp::Range::new(
7353 lsp::Position::new(0, 0),
7354 lsp::Position::new(3, 0),
7355 ),
7356 new_text: "
7357 fn a() {
7358 f10();
7359 }
7360 "
7361 .unindent(),
7362 },
7363 // edit inside second function
7364 lsp::TextEdit {
7365 range: lsp::Range::new(
7366 lsp::Position::new(4, 6),
7367 lsp::Position::new(4, 6),
7368 ),
7369 new_text: "00".into(),
7370 },
7371 // edit inside third function via two distinct edits
7372 lsp::TextEdit {
7373 range: lsp::Range::new(
7374 lsp::Position::new(7, 5),
7375 lsp::Position::new(7, 5),
7376 ),
7377 new_text: "4000".into(),
7378 },
7379 lsp::TextEdit {
7380 range: lsp::Range::new(
7381 lsp::Position::new(7, 5),
7382 lsp::Position::new(7, 6),
7383 ),
7384 new_text: "".into(),
7385 },
7386 ],
7387 Some(lsp_document_version),
7388 cx,
7389 )
7390 })
7391 .await
7392 .unwrap();
7393
7394 buffer.update(cx, |buffer, cx| {
7395 for (range, new_text) in edits {
7396 buffer.edit([(range, new_text)], cx);
7397 }
7398 assert_eq!(
7399 buffer.text(),
7400 "
7401 // above first function
7402 fn a() {
7403 // inside first function
7404 f10();
7405 }
7406 fn b() {
7407 // inside second function f200();
7408 }
7409 fn c() {
7410 f4000();
7411 }
7412 "
7413 .unindent()
7414 );
7415 });
7416 }
7417
7418 #[gpui::test]
7419 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7420 cx.foreground().forbid_parking();
7421
7422 let text = "
7423 use a::b;
7424 use a::c;
7425
7426 fn f() {
7427 b();
7428 c();
7429 }
7430 "
7431 .unindent();
7432
7433 let fs = FakeFs::new(cx.background());
7434 fs.insert_tree(
7435 "/dir",
7436 json!({
7437 "a.rs": text.clone(),
7438 }),
7439 )
7440 .await;
7441
7442 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7443 let buffer = project
7444 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7445 .await
7446 .unwrap();
7447
7448 // Simulate the language server sending us a small edit in the form of a very large diff.
7449 // Rust-analyzer does this when performing a merge-imports code action.
7450 let edits = project
7451 .update(cx, |project, cx| {
7452 project.edits_from_lsp(
7453 &buffer,
7454 [
7455 // Replace the first use statement without editing the semicolon.
7456 lsp::TextEdit {
7457 range: lsp::Range::new(
7458 lsp::Position::new(0, 4),
7459 lsp::Position::new(0, 8),
7460 ),
7461 new_text: "a::{b, c}".into(),
7462 },
7463 // Reinsert the remainder of the file between the semicolon and the final
7464 // newline of the file.
7465 lsp::TextEdit {
7466 range: lsp::Range::new(
7467 lsp::Position::new(0, 9),
7468 lsp::Position::new(0, 9),
7469 ),
7470 new_text: "\n\n".into(),
7471 },
7472 lsp::TextEdit {
7473 range: lsp::Range::new(
7474 lsp::Position::new(0, 9),
7475 lsp::Position::new(0, 9),
7476 ),
7477 new_text: "
7478 fn f() {
7479 b();
7480 c();
7481 }"
7482 .unindent(),
7483 },
7484 // Delete everything after the first newline of the file.
7485 lsp::TextEdit {
7486 range: lsp::Range::new(
7487 lsp::Position::new(1, 0),
7488 lsp::Position::new(7, 0),
7489 ),
7490 new_text: "".into(),
7491 },
7492 ],
7493 None,
7494 cx,
7495 )
7496 })
7497 .await
7498 .unwrap();
7499
7500 buffer.update(cx, |buffer, cx| {
7501 let edits = edits
7502 .into_iter()
7503 .map(|(range, text)| {
7504 (
7505 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7506 text,
7507 )
7508 })
7509 .collect::<Vec<_>>();
7510
7511 assert_eq!(
7512 edits,
7513 [
7514 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7515 (Point::new(1, 0)..Point::new(2, 0), "".into())
7516 ]
7517 );
7518
7519 for (range, new_text) in edits {
7520 buffer.edit([(range, new_text)], cx);
7521 }
7522 assert_eq!(
7523 buffer.text(),
7524 "
7525 use a::{b, c};
7526
7527 fn f() {
7528 b();
7529 c();
7530 }
7531 "
7532 .unindent()
7533 );
7534 });
7535 }
7536
7537 #[gpui::test]
7538 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7539 cx.foreground().forbid_parking();
7540
7541 let text = "
7542 use a::b;
7543 use a::c;
7544
7545 fn f() {
7546 b();
7547 c();
7548 }
7549 "
7550 .unindent();
7551
7552 let fs = FakeFs::new(cx.background());
7553 fs.insert_tree(
7554 "/dir",
7555 json!({
7556 "a.rs": text.clone(),
7557 }),
7558 )
7559 .await;
7560
7561 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7562 let buffer = project
7563 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7564 .await
7565 .unwrap();
7566
7567 // Simulate the language server sending us edits in a non-ordered fashion,
7568 // with ranges sometimes being inverted.
7569 let edits = project
7570 .update(cx, |project, cx| {
7571 project.edits_from_lsp(
7572 &buffer,
7573 [
7574 lsp::TextEdit {
7575 range: lsp::Range::new(
7576 lsp::Position::new(0, 9),
7577 lsp::Position::new(0, 9),
7578 ),
7579 new_text: "\n\n".into(),
7580 },
7581 lsp::TextEdit {
7582 range: lsp::Range::new(
7583 lsp::Position::new(0, 8),
7584 lsp::Position::new(0, 4),
7585 ),
7586 new_text: "a::{b, c}".into(),
7587 },
7588 lsp::TextEdit {
7589 range: lsp::Range::new(
7590 lsp::Position::new(1, 0),
7591 lsp::Position::new(7, 0),
7592 ),
7593 new_text: "".into(),
7594 },
7595 lsp::TextEdit {
7596 range: lsp::Range::new(
7597 lsp::Position::new(0, 9),
7598 lsp::Position::new(0, 9),
7599 ),
7600 new_text: "
7601 fn f() {
7602 b();
7603 c();
7604 }"
7605 .unindent(),
7606 },
7607 ],
7608 None,
7609 cx,
7610 )
7611 })
7612 .await
7613 .unwrap();
7614
7615 buffer.update(cx, |buffer, cx| {
7616 let edits = edits
7617 .into_iter()
7618 .map(|(range, text)| {
7619 (
7620 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7621 text,
7622 )
7623 })
7624 .collect::<Vec<_>>();
7625
7626 assert_eq!(
7627 edits,
7628 [
7629 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7630 (Point::new(1, 0)..Point::new(2, 0), "".into())
7631 ]
7632 );
7633
7634 for (range, new_text) in edits {
7635 buffer.edit([(range, new_text)], cx);
7636 }
7637 assert_eq!(
7638 buffer.text(),
7639 "
7640 use a::{b, c};
7641
7642 fn f() {
7643 b();
7644 c();
7645 }
7646 "
7647 .unindent()
7648 );
7649 });
7650 }
7651
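    // Test helper: walks the buffer's chunks over `range` and coalesces adjacent
    // chunks that share the same diagnostic severity, returning (text, severity)
    // pairs.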
7652 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7653 buffer: &Buffer,
7654 range: Range<T>,
7655 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7656 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7657 for chunk in buffer.snapshot().chunks(range, true) {
7658 if chunks.last().map_or(false, |prev_chunk| {
7659 prev_chunk.1 == chunk.diagnostic_severity
7660 }) {
7661 chunks.last_mut().unwrap().0.push_str(chunk.text);
7662 } else {
7663 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7664 }
7665 }
7666 chunks
7667 }
7668
7669 #[gpui::test]
7670 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7671 let dir = temp_tree(json!({
7672 "root": {
7673 "dir1": {},
7674 "dir2": {
7675 "dir3": {}
7676 }
7677 }
7678 }));
7679
7680 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7681 let cancel_flag = Default::default();
7682 let results = project
7683 .read_with(cx, |project, cx| {
7684 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7685 })
7686 .await;
7687
7688 assert!(results.is_empty());
7689 }
7690
7691 #[gpui::test(iterations = 10)]
7692 async fn test_definition(cx: &mut gpui::TestAppContext) {
7693 let mut language = Language::new(
7694 LanguageConfig {
7695 name: "Rust".into(),
7696 path_suffixes: vec!["rs".to_string()],
7697 ..Default::default()
7698 },
7699 Some(tree_sitter_rust::language()),
7700 );
7701 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7702
7703 let fs = FakeFs::new(cx.background());
7704 fs.insert_tree(
7705 "/dir",
7706 json!({
7707 "a.rs": "const fn a() { A }",
7708 "b.rs": "const y: i32 = crate::a()",
7709 }),
7710 )
7711 .await;
7712
7713 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7714 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7715
7716 let buffer = project
7717 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7718 .await
7719 .unwrap();
7720
7721 let fake_server = fake_servers.next().await.unwrap();
7722 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7723 let params = params.text_document_position_params;
7724 assert_eq!(
7725 params.text_document.uri.to_file_path().unwrap(),
7726 Path::new("/dir/b.rs"),
7727 );
7728 assert_eq!(params.position, lsp::Position::new(0, 22));
7729
7730 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7731 lsp::Location::new(
7732 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7733 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7734 ),
7735 )))
7736 });
7737
7738 let mut definitions = project
7739 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7740 .await
7741 .unwrap();
7742
7743 assert_eq!(definitions.len(), 1);
7744 let definition = definitions.pop().unwrap();
7745 cx.update(|cx| {
7746 let target_buffer = definition.target.buffer.read(cx);
7747 assert_eq!(
7748 target_buffer
7749 .file()
7750 .unwrap()
7751 .as_local()
7752 .unwrap()
7753 .abs_path(cx),
7754 Path::new("/dir/a.rs"),
7755 );
7756 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7757 assert_eq!(
7758 list_worktrees(&project, cx),
7759 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7760 );
7761
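            // Dropping the definition releases the only handle to the target buffer,
            // so the temporary, non-visible worktree for "a.rs" should go away
            // (verified below).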
7762 drop(definition);
7763 });
7764 cx.read(|cx| {
7765 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7766 });
7767
7768 fn list_worktrees<'a>(
7769 project: &'a ModelHandle<Project>,
7770 cx: &'a AppContext,
7771 ) -> Vec<(&'a Path, bool)> {
7772 project
7773 .read(cx)
7774 .worktrees(cx)
7775 .map(|worktree| {
7776 let worktree = worktree.read(cx);
7777 (
7778 worktree.as_local().unwrap().abs_path().as_ref(),
7779 worktree.is_visible(),
7780 )
7781 })
7782 .collect::<Vec<_>>()
7783 }
7784 }
7785
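    // Completion items returned without an explicit edit range: the project should
    // infer the range to replace from the text preceding the cursor (the word "fqn"
    // in the first case, and "cmp" inside the string literal in the second).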
7786 #[gpui::test]
7787 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7788 let mut language = Language::new(
7789 LanguageConfig {
7790 name: "TypeScript".into(),
7791 path_suffixes: vec!["ts".to_string()],
7792 ..Default::default()
7793 },
7794 Some(tree_sitter_typescript::language_typescript()),
7795 );
7796 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7797
7798 let fs = FakeFs::new(cx.background());
7799 fs.insert_tree(
7800 "/dir",
7801 json!({
7802 "a.ts": "",
7803 }),
7804 )
7805 .await;
7806
7807 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7808 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7809 let buffer = project
7810 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7811 .await
7812 .unwrap();
7813
7814 let fake_server = fake_language_servers.next().await.unwrap();
7815
7816 let text = "let a = b.fqn";
7817 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7818 let completions = project.update(cx, |project, cx| {
7819 project.completions(&buffer, text.len(), cx)
7820 });
7821
7822 fake_server
7823 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7824 Ok(Some(lsp::CompletionResponse::Array(vec![
7825 lsp::CompletionItem {
7826 label: "fullyQualifiedName?".into(),
7827 insert_text: Some("fullyQualifiedName".into()),
7828 ..Default::default()
7829 },
7830 ])))
7831 })
7832 .next()
7833 .await;
7834 let completions = completions.await.unwrap();
7835 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7836 assert_eq!(completions.len(), 1);
7837 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7838 assert_eq!(
7839 completions[0].old_range.to_offset(&snapshot),
7840 text.len() - 3..text.len()
7841 );
7842
7843 let text = "let a = \"atoms/cmp\"";
7844 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7845 let completions = project.update(cx, |project, cx| {
7846 project.completions(&buffer, text.len() - 1, cx)
7847 });
7848
7849 fake_server
7850 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7851 Ok(Some(lsp::CompletionResponse::Array(vec![
7852 lsp::CompletionItem {
7853 label: "component".into(),
7854 ..Default::default()
7855 },
7856 ])))
7857 })
7858 .next()
7859 .await;
7860 let completions = completions.await.unwrap();
7861 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7862 assert_eq!(completions.len(), 1);
7863 assert_eq!(completions[0].new_text, "component");
7864 assert_eq!(
7865 completions[0].old_range.to_offset(&snapshot),
7866 text.len() - 4..text.len() - 1
7867 );
7868 }
7869
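    // Applying a code action whose only payload is an LSP command: the command is
    // executed by the server, and any edits the server sends back via
    // `workspace/applyEdit` end up in the returned project transaction.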
7870 #[gpui::test(iterations = 10)]
7871 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7872 let mut language = Language::new(
7873 LanguageConfig {
7874 name: "TypeScript".into(),
7875 path_suffixes: vec!["ts".to_string()],
7876 ..Default::default()
7877 },
7878 None,
7879 );
7880 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7881
7882 let fs = FakeFs::new(cx.background());
7883 fs.insert_tree(
7884 "/dir",
7885 json!({
7886 "a.ts": "a",
7887 }),
7888 )
7889 .await;
7890
7891 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7892 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7893 let buffer = project
7894 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7895 .await
7896 .unwrap();
7897
7898 let fake_server = fake_language_servers.next().await.unwrap();
7899
        // The language server returns code actions that contain commands, but no edits.
7901 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7902 fake_server
7903 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7904 Ok(Some(vec![
7905 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7906 title: "The code action".into(),
7907 command: Some(lsp::Command {
7908 title: "The command".into(),
7909 command: "_the/command".into(),
7910 arguments: Some(vec![json!("the-argument")]),
7911 }),
7912 ..Default::default()
7913 }),
7914 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7915 title: "two".into(),
7916 ..Default::default()
7917 }),
7918 ]))
7919 })
7920 .next()
7921 .await;
7922
7923 let action = actions.await.unwrap()[0].clone();
7924 let apply = project.update(cx, |project, cx| {
7925 project.apply_code_action(buffer.clone(), action, true, cx)
7926 });
7927
        // Resolving the code action does not populate its edits. In the absence
        // of edits, the editor must execute the action's command instead.
7930 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7931 |action, _| async move { Ok(action) },
7932 );
7933
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7936 fake_server
7937 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7938 let fake = fake_server.clone();
7939 move |params, _| {
7940 assert_eq!(params.command, "_the/command");
7941 let fake = fake.clone();
7942 async move {
7943 fake.server
7944 .request::<lsp::request::ApplyWorkspaceEdit>(
7945 lsp::ApplyWorkspaceEditParams {
7946 label: None,
7947 edit: lsp::WorkspaceEdit {
7948 changes: Some(
7949 [(
7950 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7951 vec![lsp::TextEdit {
7952 range: lsp::Range::new(
7953 lsp::Position::new(0, 0),
7954 lsp::Position::new(0, 0),
7955 ),
7956 new_text: "X".into(),
7957 }],
7958 )]
7959 .into_iter()
7960 .collect(),
7961 ),
7962 ..Default::default()
7963 },
7964 },
7965 )
7966 .await
7967 .unwrap();
7968 Ok(Some(json!(null)))
7969 }
7970 }
7971 })
7972 .next()
7973 .await;
7974
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7977 let transaction = apply.await.unwrap();
7978 assert!(transaction.0.contains_key(&buffer));
7979 buffer.update(cx, |buffer, cx| {
7980 assert_eq!(buffer.text(), "Xa");
7981 buffer.undo(cx);
7982 assert_eq!(buffer.text(), "a");
7983 });
7984 }
7985
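    // Saving a buffer writes its current contents back to the file on disk.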
7986 #[gpui::test]
7987 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7988 let fs = FakeFs::new(cx.background());
7989 fs.insert_tree(
7990 "/dir",
7991 json!({
7992 "file1": "the old contents",
7993 }),
7994 )
7995 .await;
7996
7997 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7998 let buffer = project
7999 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8000 .await
8001 .unwrap();
8002 buffer
8003 .update(cx, |buffer, cx| {
8004 assert_eq!(buffer.text(), "the old contents");
8005 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8006 buffer.save(cx)
8007 })
8008 .await
8009 .unwrap();
8010
8011 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8012 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8013 }
8014
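    // Same as above, but with a worktree rooted at the file itself rather than at
    // its parent directory.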
8015 #[gpui::test]
8016 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8017 let fs = FakeFs::new(cx.background());
8018 fs.insert_tree(
8019 "/dir",
8020 json!({
8021 "file1": "the old contents",
8022 }),
8023 )
8024 .await;
8025
8026 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8027 let buffer = project
8028 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8029 .await
8030 .unwrap();
8031 buffer
8032 .update(cx, |buffer, cx| {
8033 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8034 buffer.save(cx)
8035 })
8036 .await
8037 .unwrap();
8038
8039 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8040 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8041 }
8042
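    // "Save as" assigns a path to an untitled buffer, writes it to disk, and marks
    // the buffer as clean.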
8043 #[gpui::test]
8044 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8045 let fs = FakeFs::new(cx.background());
8046 fs.insert_tree("/dir", json!({})).await;
8047
8048 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8049 let buffer = project.update(cx, |project, cx| {
8050 project.create_buffer("", None, cx).unwrap()
8051 });
8052 buffer.update(cx, |buffer, cx| {
8053 buffer.edit([(0..0, "abc")], cx);
8054 assert!(buffer.is_dirty());
8055 assert!(!buffer.has_conflict());
8056 });
8057 project
8058 .update(cx, |project, cx| {
8059 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8060 })
8061 .await
8062 .unwrap();
8063 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8064 buffer.read_with(cx, |buffer, cx| {
8065 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8066 assert!(!buffer.is_dirty());
8067 assert!(!buffer.has_conflict());
8068 });
8069
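        // Opening the newly saved path should return the very same buffer.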
8070 let opened_buffer = project
8071 .update(cx, |project, cx| {
8072 project.open_local_buffer("/dir/file1", cx)
8073 })
8074 .await
8075 .unwrap();
8076 assert_eq!(opened_buffer, buffer);
8077 }
8078
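    // After files are renamed and deleted on disk, open buffers should follow their
    // entries to the new paths, and a remote snapshot of the worktree should
    // converge to the same state once the update is applied.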
8079 #[gpui::test(retries = 5)]
8080 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8081 let dir = temp_tree(json!({
8082 "a": {
8083 "file1": "",
8084 "file2": "",
8085 "file3": "",
8086 },
8087 "b": {
8088 "c": {
8089 "file4": "",
8090 "file5": "",
8091 }
8092 }
8093 }));
8094
8095 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8096 let rpc = project.read_with(cx, |p, _| p.client.clone());
8097
8098 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8099 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8100 async move { buffer.await.unwrap() }
8101 };
8102 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8103 project.read_with(cx, |project, cx| {
8104 let tree = project.worktrees(cx).next().unwrap();
8105 tree.read(cx)
8106 .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
8108 .id
8109 })
8110 };
8111
8112 let buffer2 = buffer_for_path("a/file2", cx).await;
8113 let buffer3 = buffer_for_path("a/file3", cx).await;
8114 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8115 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8116
8117 let file2_id = id_for_path("a/file2", &cx);
8118 let file3_id = id_for_path("a/file3", &cx);
8119 let file4_id = id_for_path("b/c/file4", &cx);
8120
8121 // Create a remote copy of this worktree.
8122 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8123 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8124 let (remote, load_task) = cx.update(|cx| {
8125 Worktree::remote(
8126 1,
8127 1,
8128 initial_snapshot.to_proto(&Default::default(), true),
8129 rpc.clone(),
8130 cx,
8131 )
8132 });
8134 load_task.await;
8135
8136 cx.read(|cx| {
8137 assert!(!buffer2.read(cx).is_dirty());
8138 assert!(!buffer3.read(cx).is_dirty());
8139 assert!(!buffer4.read(cx).is_dirty());
8140 assert!(!buffer5.read(cx).is_dirty());
8141 });
8142
8143 // Rename and delete files and directories.
8144 tree.flush_fs_events(&cx).await;
8145 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8146 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8147 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8148 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8149 tree.flush_fs_events(&cx).await;
8150
8151 let expected_paths = vec![
8152 "a",
8153 "a/file1",
8154 "a/file2.new",
8155 "b",
8156 "d",
8157 "d/file3",
8158 "d/file4",
8159 ];
8160
8161 cx.read(|app| {
8162 assert_eq!(
8163 tree.read(app)
8164 .paths()
8165 .map(|p| p.to_str().unwrap())
8166 .collect::<Vec<_>>(),
8167 expected_paths
8168 );
8169
8170 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8171 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8172 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8173
8174 assert_eq!(
8175 buffer2.read(app).file().unwrap().path().as_ref(),
8176 Path::new("a/file2.new")
8177 );
8178 assert_eq!(
8179 buffer3.read(app).file().unwrap().path().as_ref(),
8180 Path::new("d/file3")
8181 );
8182 assert_eq!(
8183 buffer4.read(app).file().unwrap().path().as_ref(),
8184 Path::new("d/file4")
8185 );
8186 assert_eq!(
8187 buffer5.read(app).file().unwrap().path().as_ref(),
8188 Path::new("b/c/file5")
8189 );
8190
8191 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8192 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8193 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8194 assert!(buffer5.read(app).file().unwrap().is_deleted());
8195 });
8196
8197 // Update the remote worktree. Check that it becomes consistent with the
8198 // local worktree.
8199 remote.update(cx, |remote, cx| {
8200 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8201 &initial_snapshot,
8202 1,
8203 1,
8204 true,
8205 );
8206 remote
8207 .as_remote_mut()
8208 .unwrap()
8209 .snapshot
8210 .apply_remote_update(update_message)
8211 .unwrap();
8212
8213 assert_eq!(
8214 remote
8215 .paths()
8216 .map(|p| p.to_str().unwrap())
8217 .collect::<Vec<_>>(),
8218 expected_paths
8219 );
8220 });
8221 }
8222
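    // Opening the same path multiple times, even concurrently, should always yield
    // a single shared buffer.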
8223 #[gpui::test]
8224 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8225 let fs = FakeFs::new(cx.background());
8226 fs.insert_tree(
8227 "/dir",
8228 json!({
8229 "a.txt": "a-contents",
8230 "b.txt": "b-contents",
8231 }),
8232 )
8233 .await;
8234
8235 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8236
8237 // Spawn multiple tasks to open paths, repeating some paths.
8238 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8239 (
8240 p.open_local_buffer("/dir/a.txt", cx),
8241 p.open_local_buffer("/dir/b.txt", cx),
8242 p.open_local_buffer("/dir/a.txt", cx),
8243 )
8244 });
8245
8246 let buffer_a_1 = buffer_a_1.await.unwrap();
8247 let buffer_a_2 = buffer_a_2.await.unwrap();
8248 let buffer_b = buffer_b.await.unwrap();
8249 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8250 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8251
8252 // There is only one buffer per path.
8253 let buffer_a_id = buffer_a_1.id();
8254 assert_eq!(buffer_a_2.id(), buffer_a_id);
8255
8256 // Open the same path again while it is still open.
8257 drop(buffer_a_1);
8258 let buffer_a_3 = project
8259 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8260 .await
8261 .unwrap();
8262
8263 // There's still only one buffer per path.
8264 assert_eq!(buffer_a_3.id(), buffer_a_id);
8265 }
8266
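    // Tracks how a buffer's dirty state and the corresponding events react to edits,
    // saves, reverts, and deletion of the underlying file.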
8267 #[gpui::test]
8268 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8269 let fs = FakeFs::new(cx.background());
8270 fs.insert_tree(
8271 "/dir",
8272 json!({
8273 "file1": "abc",
8274 "file2": "def",
8275 "file3": "ghi",
8276 }),
8277 )
8278 .await;
8279
8280 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8281
8282 let buffer1 = project
8283 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8284 .await
8285 .unwrap();
8286 let events = Rc::new(RefCell::new(Vec::new()));
8287
        // Initially, the buffer isn't dirty.
8289 buffer1.update(cx, |buffer, cx| {
8290 cx.subscribe(&buffer1, {
8291 let events = events.clone();
8292 move |_, _, event, _| match event {
8293 BufferEvent::Operation(_) => {}
8294 _ => events.borrow_mut().push(event.clone()),
8295 }
8296 })
8297 .detach();
8298
8299 assert!(!buffer.is_dirty());
8300 assert!(events.borrow().is_empty());
8301
8302 buffer.edit([(1..2, "")], cx);
8303 });
8304
        // After the first edit, the buffer is dirty and emits a DirtyChanged event.
8306 buffer1.update(cx, |buffer, cx| {
8307 assert!(buffer.text() == "ac");
8308 assert!(buffer.is_dirty());
8309 assert_eq!(
8310 *events.borrow(),
8311 &[language::Event::Edited, language::Event::DirtyChanged]
8312 );
8313 events.borrow_mut().clear();
8314 buffer.did_save(
8315 buffer.version(),
8316 buffer.as_rope().fingerprint(),
8317 buffer.file().unwrap().mtime(),
8318 None,
8319 cx,
8320 );
8321 });
8322
        // After saving, the buffer is no longer dirty and emits a Saved event.
8324 buffer1.update(cx, |buffer, cx| {
8325 assert!(!buffer.is_dirty());
8326 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8327 events.borrow_mut().clear();
8328
8329 buffer.edit([(1..1, "B")], cx);
8330 buffer.edit([(2..2, "D")], cx);
8331 });
8332
        // After editing again, the buffer is dirty and emits another DirtyChanged event.
8334 buffer1.update(cx, |buffer, cx| {
8335 assert!(buffer.text() == "aBDc");
8336 assert!(buffer.is_dirty());
8337 assert_eq!(
8338 *events.borrow(),
8339 &[
8340 language::Event::Edited,
8341 language::Event::DirtyChanged,
8342 language::Event::Edited,
8343 ],
8344 );
8345 events.borrow_mut().clear();
8346
8347 // After restoring the buffer to its previously-saved state,
8348 // the buffer is not considered dirty anymore.
8349 buffer.edit([(1..3, "")], cx);
8350 assert!(buffer.text() == "ac");
8351 assert!(!buffer.is_dirty());
8352 });
8353
8354 assert_eq!(
8355 *events.borrow(),
8356 &[language::Event::Edited, language::Event::DirtyChanged]
8357 );
8358
8359 // When a file is deleted, the buffer is considered dirty.
8360 let events = Rc::new(RefCell::new(Vec::new()));
8361 let buffer2 = project
8362 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8363 .await
8364 .unwrap();
8365 buffer2.update(cx, |_, cx| {
8366 cx.subscribe(&buffer2, {
8367 let events = events.clone();
8368 move |_, _, event, _| events.borrow_mut().push(event.clone())
8369 })
8370 .detach();
8371 });
8372
8373 fs.remove_file("/dir/file2".as_ref(), Default::default())
8374 .await
8375 .unwrap();
8376 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8377 assert_eq!(
8378 *events.borrow(),
8379 &[
8380 language::Event::DirtyChanged,
8381 language::Event::FileHandleChanged
8382 ]
8383 );
8384
        // When a file is already dirty when it's deleted, no additional DirtyChanged
        // event is emitted.
8386 let events = Rc::new(RefCell::new(Vec::new()));
8387 let buffer3 = project
8388 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8389 .await
8390 .unwrap();
8391 buffer3.update(cx, |_, cx| {
8392 cx.subscribe(&buffer3, {
8393 let events = events.clone();
8394 move |_, _, event, _| events.borrow_mut().push(event.clone())
8395 })
8396 .detach();
8397 });
8398
8399 buffer3.update(cx, |buffer, cx| {
8400 buffer.edit([(0..0, "x")], cx);
8401 });
8402 events.borrow_mut().clear();
8403 fs.remove_file("/dir/file3".as_ref(), Default::default())
8404 .await
8405 .unwrap();
8406 buffer3
8407 .condition(&cx, |_, _| !events.borrow().is_empty())
8408 .await;
8409 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8410 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8411 }
8412
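    // Covers how an open buffer responds to its file changing on disk: a clean buffer
    // reloads (keeping anchors in sensible positions), while a dirty buffer keeps its
    // contents and is flagged as conflicted.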
8413 #[gpui::test]
8414 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8415 let initial_contents = "aaa\nbbbbb\nc\n";
8416 let fs = FakeFs::new(cx.background());
8417 fs.insert_tree(
8418 "/dir",
8419 json!({
8420 "the-file": initial_contents,
8421 }),
8422 )
8423 .await;
8424 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8425 let buffer = project
8426 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8427 .await
8428 .unwrap();
8429
8430 let anchors = (0..3)
8431 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8432 .collect::<Vec<_>>();
8433
8434 // Change the file on disk, adding two new lines of text, and removing
8435 // one line.
8436 buffer.read_with(cx, |buffer, _| {
8437 assert!(!buffer.is_dirty());
8438 assert!(!buffer.has_conflict());
8439 });
8440 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8441 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8442 .await
8443 .unwrap();
8444
8445 // Because the buffer was not modified, it is reloaded from disk. Its
8446 // contents are edited according to the diff between the old and new
8447 // file contents.
8448 buffer
8449 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8450 .await;
8451
8452 buffer.update(cx, |buffer, _| {
8453 assert_eq!(buffer.text(), new_contents);
8454 assert!(!buffer.is_dirty());
8455 assert!(!buffer.has_conflict());
8456
8457 let anchor_positions = anchors
8458 .iter()
8459 .map(|anchor| anchor.to_point(&*buffer))
8460 .collect::<Vec<_>>();
8461 assert_eq!(
8462 anchor_positions,
8463 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8464 );
8465 });
8466
8467 // Modify the buffer
8468 buffer.update(cx, |buffer, cx| {
8469 buffer.edit([(0..0, " ")], cx);
8470 assert!(buffer.is_dirty());
8471 assert!(!buffer.has_conflict());
8472 });
8473
8474 // Change the file on disk again, adding blank lines to the beginning.
8475 fs.save(
8476 "/dir/the-file".as_ref(),
8477 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8478 )
8479 .await
8480 .unwrap();
8481
8482 // Because the buffer is modified, it doesn't reload from disk, but is
8483 // marked as having a conflict.
8484 buffer
8485 .condition(&cx, |buffer, _| buffer.has_conflict())
8486 .await;
8487 }
8488
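    // Diagnostics published with related-information links between them should be
    // grouped: each group carries one primary entry plus its supporting hints, all
    // sharing the same group id.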
8489 #[gpui::test]
8490 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8491 cx.foreground().forbid_parking();
8492
8493 let fs = FakeFs::new(cx.background());
8494 fs.insert_tree(
8495 "/the-dir",
8496 json!({
8497 "a.rs": "
8498 fn foo(mut v: Vec<usize>) {
8499 for x in &v {
8500 v.push(1);
8501 }
8502 }
8503 "
8504 .unindent(),
8505 }),
8506 )
8507 .await;
8508
8509 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8510 let buffer = project
8511 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8512 .await
8513 .unwrap();
8514
8515 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8516 let message = lsp::PublishDiagnosticsParams {
8517 uri: buffer_uri.clone(),
8518 diagnostics: vec![
8519 lsp::Diagnostic {
8520 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8521 severity: Some(DiagnosticSeverity::WARNING),
8522 message: "error 1".to_string(),
8523 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8524 location: lsp::Location {
8525 uri: buffer_uri.clone(),
8526 range: lsp::Range::new(
8527 lsp::Position::new(1, 8),
8528 lsp::Position::new(1, 9),
8529 ),
8530 },
8531 message: "error 1 hint 1".to_string(),
8532 }]),
8533 ..Default::default()
8534 },
8535 lsp::Diagnostic {
8536 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8537 severity: Some(DiagnosticSeverity::HINT),
8538 message: "error 1 hint 1".to_string(),
8539 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8540 location: lsp::Location {
8541 uri: buffer_uri.clone(),
8542 range: lsp::Range::new(
8543 lsp::Position::new(1, 8),
8544 lsp::Position::new(1, 9),
8545 ),
8546 },
8547 message: "original diagnostic".to_string(),
8548 }]),
8549 ..Default::default()
8550 },
8551 lsp::Diagnostic {
8552 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8553 severity: Some(DiagnosticSeverity::ERROR),
8554 message: "error 2".to_string(),
8555 related_information: Some(vec![
8556 lsp::DiagnosticRelatedInformation {
8557 location: lsp::Location {
8558 uri: buffer_uri.clone(),
8559 range: lsp::Range::new(
8560 lsp::Position::new(1, 13),
8561 lsp::Position::new(1, 15),
8562 ),
8563 },
8564 message: "error 2 hint 1".to_string(),
8565 },
8566 lsp::DiagnosticRelatedInformation {
8567 location: lsp::Location {
8568 uri: buffer_uri.clone(),
8569 range: lsp::Range::new(
8570 lsp::Position::new(1, 13),
8571 lsp::Position::new(1, 15),
8572 ),
8573 },
8574 message: "error 2 hint 2".to_string(),
8575 },
8576 ]),
8577 ..Default::default()
8578 },
8579 lsp::Diagnostic {
8580 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8581 severity: Some(DiagnosticSeverity::HINT),
8582 message: "error 2 hint 1".to_string(),
8583 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8584 location: lsp::Location {
8585 uri: buffer_uri.clone(),
8586 range: lsp::Range::new(
8587 lsp::Position::new(2, 8),
8588 lsp::Position::new(2, 17),
8589 ),
8590 },
8591 message: "original diagnostic".to_string(),
8592 }]),
8593 ..Default::default()
8594 },
8595 lsp::Diagnostic {
8596 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8597 severity: Some(DiagnosticSeverity::HINT),
8598 message: "error 2 hint 2".to_string(),
8599 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8600 location: lsp::Location {
8601 uri: buffer_uri.clone(),
8602 range: lsp::Range::new(
8603 lsp::Position::new(2, 8),
8604 lsp::Position::new(2, 17),
8605 ),
8606 },
8607 message: "original diagnostic".to_string(),
8608 }]),
8609 ..Default::default()
8610 },
8611 ],
8612 version: None,
8613 };
8614
8615 project
8616 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8617 .unwrap();
8618 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8619
8620 assert_eq!(
8621 buffer
8622 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8623 .collect::<Vec<_>>(),
8624 &[
8625 DiagnosticEntry {
8626 range: Point::new(1, 8)..Point::new(1, 9),
8627 diagnostic: Diagnostic {
8628 severity: DiagnosticSeverity::WARNING,
8629 message: "error 1".to_string(),
8630 group_id: 0,
8631 is_primary: true,
8632 ..Default::default()
8633 }
8634 },
8635 DiagnosticEntry {
8636 range: Point::new(1, 8)..Point::new(1, 9),
8637 diagnostic: Diagnostic {
8638 severity: DiagnosticSeverity::HINT,
8639 message: "error 1 hint 1".to_string(),
8640 group_id: 0,
8641 is_primary: false,
8642 ..Default::default()
8643 }
8644 },
8645 DiagnosticEntry {
8646 range: Point::new(1, 13)..Point::new(1, 15),
8647 diagnostic: Diagnostic {
8648 severity: DiagnosticSeverity::HINT,
8649 message: "error 2 hint 1".to_string(),
8650 group_id: 1,
8651 is_primary: false,
8652 ..Default::default()
8653 }
8654 },
8655 DiagnosticEntry {
8656 range: Point::new(1, 13)..Point::new(1, 15),
8657 diagnostic: Diagnostic {
8658 severity: DiagnosticSeverity::HINT,
8659 message: "error 2 hint 2".to_string(),
8660 group_id: 1,
8661 is_primary: false,
8662 ..Default::default()
8663 }
8664 },
8665 DiagnosticEntry {
8666 range: Point::new(2, 8)..Point::new(2, 17),
8667 diagnostic: Diagnostic {
8668 severity: DiagnosticSeverity::ERROR,
8669 message: "error 2".to_string(),
8670 group_id: 1,
8671 is_primary: true,
8672 ..Default::default()
8673 }
8674 }
8675 ]
8676 );
8677
8678 assert_eq!(
8679 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8680 &[
8681 DiagnosticEntry {
8682 range: Point::new(1, 8)..Point::new(1, 9),
8683 diagnostic: Diagnostic {
8684 severity: DiagnosticSeverity::WARNING,
8685 message: "error 1".to_string(),
8686 group_id: 0,
8687 is_primary: true,
8688 ..Default::default()
8689 }
8690 },
8691 DiagnosticEntry {
8692 range: Point::new(1, 8)..Point::new(1, 9),
8693 diagnostic: Diagnostic {
8694 severity: DiagnosticSeverity::HINT,
8695 message: "error 1 hint 1".to_string(),
8696 group_id: 0,
8697 is_primary: false,
8698 ..Default::default()
8699 }
8700 },
8701 ]
8702 );
8703 assert_eq!(
8704 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8705 &[
8706 DiagnosticEntry {
8707 range: Point::new(1, 13)..Point::new(1, 15),
8708 diagnostic: Diagnostic {
8709 severity: DiagnosticSeverity::HINT,
8710 message: "error 2 hint 1".to_string(),
8711 group_id: 1,
8712 is_primary: false,
8713 ..Default::default()
8714 }
8715 },
8716 DiagnosticEntry {
8717 range: Point::new(1, 13)..Point::new(1, 15),
8718 diagnostic: Diagnostic {
8719 severity: DiagnosticSeverity::HINT,
8720 message: "error 2 hint 2".to_string(),
8721 group_id: 1,
8722 is_primary: false,
8723 ..Default::default()
8724 }
8725 },
8726 DiagnosticEntry {
8727 range: Point::new(2, 8)..Point::new(2, 17),
8728 diagnostic: Diagnostic {
8729 severity: DiagnosticSeverity::ERROR,
8730 message: "error 2".to_string(),
8731 group_id: 1,
8732 is_primary: true,
8733 ..Default::default()
8734 }
8735 }
8736 ]
8737 );
8738 }
8739
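    // Exercises the two-step LSP rename flow: `textDocument/prepareRename` to
    // validate the range under the cursor, then `textDocument/rename` to apply a
    // workspace edit that may span multiple buffers.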
8740 #[gpui::test]
8741 async fn test_rename(cx: &mut gpui::TestAppContext) {
8742 cx.foreground().forbid_parking();
8743
8744 let mut language = Language::new(
8745 LanguageConfig {
8746 name: "Rust".into(),
8747 path_suffixes: vec!["rs".to_string()],
8748 ..Default::default()
8749 },
8750 Some(tree_sitter_rust::language()),
8751 );
8752 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8753 capabilities: lsp::ServerCapabilities {
8754 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8755 prepare_provider: Some(true),
8756 work_done_progress_options: Default::default(),
8757 })),
8758 ..Default::default()
8759 },
8760 ..Default::default()
8761 });
8762
8763 let fs = FakeFs::new(cx.background());
8764 fs.insert_tree(
8765 "/dir",
8766 json!({
8767 "one.rs": "const ONE: usize = 1;",
8768 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8769 }),
8770 )
8771 .await;
8772
8773 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8774 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8775 let buffer = project
8776 .update(cx, |project, cx| {
8777 project.open_local_buffer("/dir/one.rs", cx)
8778 })
8779 .await
8780 .unwrap();
8781
8782 let fake_server = fake_servers.next().await.unwrap();
8783
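        // Prepare the rename: the fake server reports that the symbol under the
        // cursor occupies bytes 6..9 ("ONE").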
8784 let response = project.update(cx, |project, cx| {
8785 project.prepare_rename(buffer.clone(), 7, cx)
8786 });
8787 fake_server
8788 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8789 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8790 assert_eq!(params.position, lsp::Position::new(0, 7));
8791 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8792 lsp::Position::new(0, 6),
8793 lsp::Position::new(0, 9),
8794 ))))
8795 })
8796 .next()
8797 .await
8798 .unwrap();
8799 let range = response.await.unwrap().unwrap();
8800 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8801 assert_eq!(range, 6..9);
8802
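        // Perform the rename: the server's workspace edit renames ONE to THREE in
        // both files, and both edited buffers appear in the resulting transaction.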
8803 let response = project.update(cx, |project, cx| {
8804 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8805 });
8806 fake_server
8807 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8808 assert_eq!(
8809 params.text_document_position.text_document.uri.as_str(),
8810 "file:///dir/one.rs"
8811 );
8812 assert_eq!(
8813 params.text_document_position.position,
8814 lsp::Position::new(0, 7)
8815 );
8816 assert_eq!(params.new_name, "THREE");
8817 Ok(Some(lsp::WorkspaceEdit {
8818 changes: Some(
8819 [
8820 (
8821 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8822 vec![lsp::TextEdit::new(
8823 lsp::Range::new(
8824 lsp::Position::new(0, 6),
8825 lsp::Position::new(0, 9),
8826 ),
8827 "THREE".to_string(),
8828 )],
8829 ),
8830 (
8831 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8832 vec![
8833 lsp::TextEdit::new(
8834 lsp::Range::new(
8835 lsp::Position::new(0, 24),
8836 lsp::Position::new(0, 27),
8837 ),
8838 "THREE".to_string(),
8839 ),
8840 lsp::TextEdit::new(
8841 lsp::Range::new(
8842 lsp::Position::new(0, 35),
8843 lsp::Position::new(0, 38),
8844 ),
8845 "THREE".to_string(),
8846 ),
8847 ],
8848 ),
8849 ]
8850 .into_iter()
8851 .collect(),
8852 ),
8853 ..Default::default()
8854 }))
8855 })
8856 .next()
8857 .await
8858 .unwrap();
8859 let mut transaction = response.await.unwrap().0;
8860 assert_eq!(transaction.len(), 2);
8861 assert_eq!(
8862 transaction
8863 .remove_entry(&buffer)
8864 .unwrap()
8865 .0
8866 .read_with(cx, |buffer, _| buffer.text()),
8867 "const THREE: usize = 1;"
8868 );
8869 assert_eq!(
8870 transaction
8871 .into_keys()
8872 .next()
8873 .unwrap()
8874 .read_with(cx, |buffer, _| buffer.text()),
8875 "const TWO: usize = one::THREE + one::THREE;"
8876 );
8877 }
8878
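    // Project-wide search should report matches both from files on disk and from
    // open buffers with unsaved edits.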
8879 #[gpui::test]
8880 async fn test_search(cx: &mut gpui::TestAppContext) {
8881 let fs = FakeFs::new(cx.background());
8882 fs.insert_tree(
8883 "/dir",
8884 json!({
8885 "one.rs": "const ONE: usize = 1;",
8886 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8887 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8888 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8889 }),
8890 )
8891 .await;
8892 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8893 assert_eq!(
8894 search(&project, SearchQuery::text("TWO", false, true), cx)
8895 .await
8896 .unwrap(),
8897 HashMap::from_iter([
8898 ("two.rs".to_string(), vec![6..9]),
8899 ("three.rs".to_string(), vec![37..40])
8900 ])
8901 );
8902
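        // Edit an open buffer without saving; subsequent searches should see the
        // in-memory contents rather than what's on disk.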
8903 let buffer_4 = project
8904 .update(cx, |project, cx| {
8905 project.open_local_buffer("/dir/four.rs", cx)
8906 })
8907 .await
8908 .unwrap();
8909 buffer_4.update(cx, |buffer, cx| {
8910 let text = "two::TWO";
8911 buffer.edit([(20..28, text), (31..43, text)], cx);
8912 });
8913
8914 assert_eq!(
8915 search(&project, SearchQuery::text("TWO", false, true), cx)
8916 .await
8917 .unwrap(),
8918 HashMap::from_iter([
8919 ("two.rs".to_string(), vec![6..9]),
8920 ("three.rs".to_string(), vec![37..40]),
8921 ("four.rs".to_string(), vec![25..28, 36..39])
8922 ])
8923 );
8924
8925 async fn search(
8926 project: &ModelHandle<Project>,
8927 query: SearchQuery,
8928 cx: &mut gpui::TestAppContext,
8929 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8930 let results = project
8931 .update(cx, |project, cx| project.search(query, cx))
8932 .await?;
8933
8934 Ok(results
8935 .into_iter()
8936 .map(|(buffer, ranges)| {
8937 buffer.read_with(cx, |buffer, _| {
8938 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8939 let ranges = ranges
8940 .into_iter()
8941 .map(|range| range.to_offset(buffer))
8942 .collect::<Vec<_>>();
8943 (path, ranges)
8944 })
8945 })
8946 .collect())
8947 }
8948 }
8949}