1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 next_diagnostic_group_id: usize,
77 user_store: ModelHandle<UserStore>,
78 fs: Arc<dyn Fs>,
79 client_state: ProjectClientState,
80 collaborators: HashMap<PeerId, Collaborator>,
81 subscriptions: Vec<client::Subscription>,
82 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
83 shared_buffers: HashMap<PeerId, HashSet<u64>>,
84 loading_buffers: HashMap<
85 ProjectPath,
86 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
87 >,
88 loading_local_worktrees:
89 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
90 opened_buffers: HashMap<u64, OpenBuffer>,
91 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
92 nonce: u128,
93}
94
95#[derive(Error, Debug)]
96pub enum JoinProjectError {
97 #[error("host declined join request")]
98 HostDeclined,
99 #[error("host closed the project")]
100 HostClosedProject,
101 #[error("host went offline")]
102 HostWentOffline,
103 #[error("{0}")]
104 Other(#[from] anyhow::Error),
105}
106
107enum OpenBuffer {
108 Strong(ModelHandle<Buffer>),
109 Weak(WeakModelHandle<Buffer>),
110 Loading(Vec<Operation>),
111}
112
113enum WorktreeHandle {
114 Strong(ModelHandle<Worktree>),
115 Weak(WeakModelHandle<Worktree>),
116}
117
118enum ProjectClientState {
119 Local {
120 is_shared: bool,
121 remote_id_tx: watch::Sender<Option<u64>>,
122 remote_id_rx: watch::Receiver<Option<u64>>,
123 _maintain_remote_id_task: Task<Option<()>>,
124 },
125 Remote {
126 sharing_has_stopped: bool,
127 remote_id: u64,
128 replica_id: ReplicaId,
129 _detect_unshare_task: Task<Option<()>>,
130 },
131}
132
133#[derive(Clone, Debug)]
134pub struct Collaborator {
135 pub user: Arc<User>,
136 pub peer_id: PeerId,
137 pub replica_id: ReplicaId,
138}
139
140#[derive(Clone, Debug, PartialEq, Eq)]
141pub enum Event {
142 ActiveEntryChanged(Option<ProjectEntryId>),
143 WorktreeAdded,
144 WorktreeRemoved(WorktreeId),
145 DiskBasedDiagnosticsStarted,
146 DiskBasedDiagnosticsUpdated,
147 DiskBasedDiagnosticsFinished,
148 DiagnosticsUpdated(ProjectPath),
149 RemoteIdChanged(Option<u64>),
150 CollaboratorLeft(PeerId),
151 ContactRequestedJoin(Arc<User>),
152 ContactCancelledJoinRequest(Arc<User>),
153}
154
155#[derive(Serialize)]
156pub struct LanguageServerStatus {
157 pub name: String,
158 pub pending_work: BTreeMap<String, LanguageServerProgress>,
159 pub pending_diagnostic_updates: isize,
160}
161
162#[derive(Clone, Debug, Serialize)]
163pub struct LanguageServerProgress {
164 pub message: Option<String>,
165 pub percentage: Option<usize>,
166 #[serde(skip_serializing)]
167 pub last_update_at: Instant,
168}
169
170#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
171pub struct ProjectPath {
172 pub worktree_id: WorktreeId,
173 pub path: Arc<Path>,
174}
175
176#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
177pub struct DiagnosticSummary {
178 pub error_count: usize,
179 pub warning_count: usize,
180}
181
182#[derive(Debug)]
183pub struct Location {
184 pub buffer: ModelHandle<Buffer>,
185 pub range: Range<language::Anchor>,
186}
187
188#[derive(Debug)]
189pub struct DocumentHighlight {
190 pub range: Range<language::Anchor>,
191 pub kind: DocumentHighlightKind,
192}
193
194#[derive(Clone, Debug)]
195pub struct Symbol {
196 pub source_worktree_id: WorktreeId,
197 pub worktree_id: WorktreeId,
198 pub language_server_name: LanguageServerName,
199 pub path: PathBuf,
200 pub label: CodeLabel,
201 pub name: String,
202 pub kind: lsp::SymbolKind,
203 pub range: Range<PointUtf16>,
204 pub signature: [u8; 32],
205}
206
207#[derive(Default)]
208pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
209
210impl DiagnosticSummary {
211 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
212 let mut this = Self {
213 error_count: 0,
214 warning_count: 0,
215 };
216
217 for entry in diagnostics {
218 if entry.diagnostic.is_primary {
219 match entry.diagnostic.severity {
220 DiagnosticSeverity::ERROR => this.error_count += 1,
221 DiagnosticSeverity::WARNING => this.warning_count += 1,
222 _ => {}
223 }
224 }
225 }
226
227 this
228 }
229
230 pub fn is_empty(&self) -> bool {
231 self.error_count == 0 && self.warning_count == 0
232 }
233
234 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
235 proto::DiagnosticSummary {
236 path: path.to_string_lossy().to_string(),
237 error_count: self.error_count as u32,
238 warning_count: self.warning_count as u32,
239 }
240 }
241}
242
243#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
244pub struct ProjectEntryId(usize);
245
246impl ProjectEntryId {
247 pub const MAX: Self = Self(usize::MAX);
248
249 pub fn new(counter: &AtomicUsize) -> Self {
250 Self(counter.fetch_add(1, SeqCst))
251 }
252
253 pub fn from_proto(id: u64) -> Self {
254 Self(id as usize)
255 }
256
257 pub fn to_proto(&self) -> u64 {
258 self.0 as u64
259 }
260
261 pub fn to_usize(&self) -> usize {
262 self.0
263 }
264}
265
266impl Project {
267 pub fn init(client: &Arc<Client>) {
268 client.add_model_message_handler(Self::handle_request_join_project);
269 client.add_model_message_handler(Self::handle_add_collaborator);
270 client.add_model_message_handler(Self::handle_buffer_reloaded);
271 client.add_model_message_handler(Self::handle_buffer_saved);
272 client.add_model_message_handler(Self::handle_start_language_server);
273 client.add_model_message_handler(Self::handle_update_language_server);
274 client.add_model_message_handler(Self::handle_remove_collaborator);
275 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
276 client.add_model_message_handler(Self::handle_register_worktree);
277 client.add_model_message_handler(Self::handle_unregister_worktree);
278 client.add_model_message_handler(Self::handle_unregister_project);
279 client.add_model_message_handler(Self::handle_project_unshared);
280 client.add_model_message_handler(Self::handle_update_buffer_file);
281 client.add_model_message_handler(Self::handle_update_buffer);
282 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
283 client.add_model_message_handler(Self::handle_update_worktree);
284 client.add_model_request_handler(Self::handle_create_project_entry);
285 client.add_model_request_handler(Self::handle_rename_project_entry);
286 client.add_model_request_handler(Self::handle_copy_project_entry);
287 client.add_model_request_handler(Self::handle_delete_project_entry);
288 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
289 client.add_model_request_handler(Self::handle_apply_code_action);
290 client.add_model_request_handler(Self::handle_reload_buffers);
291 client.add_model_request_handler(Self::handle_format_buffers);
292 client.add_model_request_handler(Self::handle_get_code_actions);
293 client.add_model_request_handler(Self::handle_get_completions);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
295 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
296 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
297 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
298 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
299 client.add_model_request_handler(Self::handle_search_project);
300 client.add_model_request_handler(Self::handle_get_project_symbols);
301 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
302 client.add_model_request_handler(Self::handle_open_buffer_by_id);
303 client.add_model_request_handler(Self::handle_open_buffer_by_path);
304 client.add_model_request_handler(Self::handle_save_buffer);
305 }
306
307 pub fn local(
308 client: Arc<Client>,
309 user_store: ModelHandle<UserStore>,
310 languages: Arc<LanguageRegistry>,
311 fs: Arc<dyn Fs>,
312 cx: &mut MutableAppContext,
313 ) -> ModelHandle<Self> {
314 cx.add_model(|cx: &mut ModelContext<Self>| {
315 let (remote_id_tx, remote_id_rx) = watch::channel();
316 let _maintain_remote_id_task = cx.spawn_weak({
317 let rpc = client.clone();
318 move |this, mut cx| {
319 async move {
320 let mut status = rpc.status();
321 while let Some(status) = status.next().await {
322 if let Some(this) = this.upgrade(&cx) {
323 if status.is_connected() {
324 this.update(&mut cx, |this, cx| this.register(cx)).await?;
325 } else {
326 this.update(&mut cx, |this, cx| this.unregister(cx));
327 }
328 }
329 }
330 Ok(())
331 }
332 .log_err()
333 }
334 });
335
336 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
337 Self {
338 worktrees: Default::default(),
339 collaborators: Default::default(),
340 opened_buffers: Default::default(),
341 shared_buffers: Default::default(),
342 loading_buffers: Default::default(),
343 loading_local_worktrees: Default::default(),
344 buffer_snapshots: Default::default(),
345 client_state: ProjectClientState::Local {
346 is_shared: false,
347 remote_id_tx,
348 remote_id_rx,
349 _maintain_remote_id_task,
350 },
351 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
352 subscriptions: Vec::new(),
353 active_entry: None,
354 languages,
355 client,
356 user_store,
357 fs,
358 next_entry_id: Default::default(),
359 next_diagnostic_group_id: Default::default(),
360 language_servers: Default::default(),
361 started_language_servers: Default::default(),
362 language_server_statuses: Default::default(),
363 last_workspace_edits_by_language_server: Default::default(),
364 language_server_settings: Default::default(),
365 next_language_server_id: 0,
366 nonce: StdRng::from_entropy().gen(),
367 }
368 })
369 }
370
371 pub async fn remote(
372 remote_id: u64,
373 client: Arc<Client>,
374 user_store: ModelHandle<UserStore>,
375 languages: Arc<LanguageRegistry>,
376 fs: Arc<dyn Fs>,
377 cx: &mut AsyncAppContext,
378 ) -> Result<ModelHandle<Self>, JoinProjectError> {
379 client.authenticate_and_connect(true, &cx).await?;
380
381 let response = client
382 .request(proto::JoinProject {
383 project_id: remote_id,
384 })
385 .await?;
386
387 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
388 proto::join_project_response::Variant::Accept(response) => response,
389 proto::join_project_response::Variant::Decline(decline) => {
390 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
391 Some(proto::join_project_response::decline::Reason::Declined) => {
392 Err(JoinProjectError::HostDeclined)?
393 }
394 Some(proto::join_project_response::decline::Reason::Closed) => {
395 Err(JoinProjectError::HostClosedProject)?
396 }
397 Some(proto::join_project_response::decline::Reason::WentOffline) => {
398 Err(JoinProjectError::HostWentOffline)?
399 }
400 None => Err(anyhow!("missing decline reason"))?,
401 }
402 }
403 };
404
405 let replica_id = response.replica_id as ReplicaId;
406
407 let mut worktrees = Vec::new();
408 for worktree in response.worktrees {
409 let (worktree, load_task) = cx
410 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
411 worktrees.push(worktree);
412 load_task.detach();
413 }
414
415 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
416 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
417 let mut this = Self {
418 worktrees: Vec::new(),
419 loading_buffers: Default::default(),
420 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
421 shared_buffers: Default::default(),
422 loading_local_worktrees: Default::default(),
423 active_entry: None,
424 collaborators: Default::default(),
425 languages,
426 user_store: user_store.clone(),
427 fs,
428 next_entry_id: Default::default(),
429 next_diagnostic_group_id: Default::default(),
430 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
431 client: client.clone(),
432 client_state: ProjectClientState::Remote {
433 sharing_has_stopped: false,
434 remote_id,
435 replica_id,
436 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
437 async move {
438 let mut status = client.status();
439 let is_connected =
440 status.next().await.map_or(false, |s| s.is_connected());
441 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
442 if !is_connected || status.next().await.is_some() {
443 if let Some(this) = this.upgrade(&cx) {
444 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
445 }
446 }
447 Ok(())
448 }
449 .log_err()
450 }),
451 },
452 language_servers: Default::default(),
453 started_language_servers: Default::default(),
454 language_server_settings: Default::default(),
455 language_server_statuses: response
456 .language_servers
457 .into_iter()
458 .map(|server| {
459 (
460 server.id as usize,
461 LanguageServerStatus {
462 name: server.name,
463 pending_work: Default::default(),
464 pending_diagnostic_updates: 0,
465 },
466 )
467 })
468 .collect(),
469 last_workspace_edits_by_language_server: Default::default(),
470 next_language_server_id: 0,
471 opened_buffers: Default::default(),
472 buffer_snapshots: Default::default(),
473 nonce: StdRng::from_entropy().gen(),
474 };
475 for worktree in worktrees {
476 this.add_worktree(&worktree, cx);
477 }
478 this
479 });
480
481 let user_ids = response
482 .collaborators
483 .iter()
484 .map(|peer| peer.user_id)
485 .collect();
486 user_store
487 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
488 .await?;
489 let mut collaborators = HashMap::default();
490 for message in response.collaborators {
491 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
492 collaborators.insert(collaborator.peer_id, collaborator);
493 }
494
495 this.update(cx, |this, _| {
496 this.collaborators = collaborators;
497 });
498
499 Ok(this)
500 }
501
502 #[cfg(any(test, feature = "test-support"))]
503 pub async fn test(
504 fs: Arc<dyn Fs>,
505 root_paths: impl IntoIterator<Item = &Path>,
506 cx: &mut gpui::TestAppContext,
507 ) -> ModelHandle<Project> {
508 let languages = Arc::new(LanguageRegistry::test());
509 let http_client = client::test::FakeHttpClient::with_404_response();
510 let client = client::Client::new(http_client.clone());
511 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
512 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
513 for path in root_paths {
514 let (tree, _) = project
515 .update(cx, |project, cx| {
516 project.find_or_create_local_worktree(path, true, cx)
517 })
518 .await
519 .unwrap();
520 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
521 .await;
522 }
523 project
524 }
525
526 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
527 self.opened_buffers
528 .get(&remote_id)
529 .and_then(|buffer| buffer.upgrade(cx))
530 }
531
532 pub fn languages(&self) -> &Arc<LanguageRegistry> {
533 &self.languages
534 }
535
536 pub fn client(&self) -> Arc<Client> {
537 self.client.clone()
538 }
539
540 pub fn user_store(&self) -> ModelHandle<UserStore> {
541 self.user_store.clone()
542 }
543
544 #[cfg(any(test, feature = "test-support"))]
545 pub fn check_invariants(&self, cx: &AppContext) {
546 if self.is_local() {
547 let mut worktree_root_paths = HashMap::default();
548 for worktree in self.worktrees(cx) {
549 let worktree = worktree.read(cx);
550 let abs_path = worktree.as_local().unwrap().abs_path().clone();
551 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
552 assert_eq!(
553 prev_worktree_id,
554 None,
555 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
556 abs_path,
557 worktree.id(),
558 prev_worktree_id
559 )
560 }
561 } else {
562 let replica_id = self.replica_id();
563 for buffer in self.opened_buffers.values() {
564 if let Some(buffer) = buffer.upgrade(cx) {
565 let buffer = buffer.read(cx);
566 assert_eq!(
567 buffer.deferred_ops_len(),
568 0,
569 "replica {}, buffer {} has deferred operations",
570 replica_id,
571 buffer.remote_id()
572 );
573 }
574 }
575 }
576 }
577
578 #[cfg(any(test, feature = "test-support"))]
579 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
580 let path = path.into();
581 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
582 self.opened_buffers.iter().any(|(_, buffer)| {
583 if let Some(buffer) = buffer.upgrade(cx) {
584 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
585 if file.worktree == worktree && file.path() == &path.path {
586 return true;
587 }
588 }
589 }
590 false
591 })
592 } else {
593 false
594 }
595 }
596
597 pub fn fs(&self) -> &Arc<dyn Fs> {
598 &self.fs
599 }
600
601 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
602 self.unshared(cx);
603 for worktree in &self.worktrees {
604 if let Some(worktree) = worktree.upgrade(cx) {
605 worktree.update(cx, |worktree, _| {
606 worktree.as_local_mut().unwrap().unregister();
607 });
608 }
609 }
610
611 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
612 *remote_id_tx.borrow_mut() = None;
613 }
614
615 self.subscriptions.clear();
616 }
617
618 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
619 self.unregister(cx);
620
621 let response = self.client.request(proto::RegisterProject {});
622 cx.spawn(|this, mut cx| async move {
623 let remote_id = response.await?.project_id;
624
625 let mut registrations = Vec::new();
626 this.update(&mut cx, |this, cx| {
627 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
628 *remote_id_tx.borrow_mut() = Some(remote_id);
629 }
630
631 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
632
633 this.subscriptions
634 .push(this.client.add_model_for_remote_entity(remote_id, cx));
635
636 for worktree in &this.worktrees {
637 if let Some(worktree) = worktree.upgrade(cx) {
638 registrations.push(worktree.update(cx, |worktree, cx| {
639 let worktree = worktree.as_local_mut().unwrap();
640 worktree.register(remote_id, cx)
641 }));
642 }
643 }
644 });
645
646 futures::future::try_join_all(registrations).await?;
647 Ok(())
648 })
649 }
650
651 pub fn remote_id(&self) -> Option<u64> {
652 match &self.client_state {
653 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
654 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
655 }
656 }
657
658 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
659 let mut id = None;
660 let mut watch = None;
661 match &self.client_state {
662 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
663 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
664 }
665
666 async move {
667 if let Some(id) = id {
668 return id;
669 }
670 let mut watch = watch.unwrap();
671 loop {
672 let id = *watch.borrow();
673 if let Some(id) = id {
674 return id;
675 }
676 watch.next().await;
677 }
678 }
679 }
680
681 pub fn shared_remote_id(&self) -> Option<u64> {
682 match &self.client_state {
683 ProjectClientState::Local {
684 remote_id_rx,
685 is_shared,
686 ..
687 } => {
688 if *is_shared {
689 *remote_id_rx.borrow()
690 } else {
691 None
692 }
693 }
694 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
695 }
696 }
697
698 pub fn replica_id(&self) -> ReplicaId {
699 match &self.client_state {
700 ProjectClientState::Local { .. } => 0,
701 ProjectClientState::Remote { replica_id, .. } => *replica_id,
702 }
703 }
704
705 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
706 &self.collaborators
707 }
708
709 pub fn worktrees<'a>(
710 &'a self,
711 cx: &'a AppContext,
712 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
713 self.worktrees
714 .iter()
715 .filter_map(move |worktree| worktree.upgrade(cx))
716 }
717
718 pub fn visible_worktrees<'a>(
719 &'a self,
720 cx: &'a AppContext,
721 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
722 self.worktrees.iter().filter_map(|worktree| {
723 worktree.upgrade(cx).and_then(|worktree| {
724 if worktree.read(cx).is_visible() {
725 Some(worktree)
726 } else {
727 None
728 }
729 })
730 })
731 }
732
733 pub fn worktree_for_id(
734 &self,
735 id: WorktreeId,
736 cx: &AppContext,
737 ) -> Option<ModelHandle<Worktree>> {
738 self.worktrees(cx)
739 .find(|worktree| worktree.read(cx).id() == id)
740 }
741
742 pub fn worktree_for_entry(
743 &self,
744 entry_id: ProjectEntryId,
745 cx: &AppContext,
746 ) -> Option<ModelHandle<Worktree>> {
747 self.worktrees(cx)
748 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
749 }
750
751 pub fn worktree_id_for_entry(
752 &self,
753 entry_id: ProjectEntryId,
754 cx: &AppContext,
755 ) -> Option<WorktreeId> {
756 self.worktree_for_entry(entry_id, cx)
757 .map(|worktree| worktree.read(cx).id())
758 }
759
760 pub fn create_entry(
761 &mut self,
762 project_path: impl Into<ProjectPath>,
763 is_directory: bool,
764 cx: &mut ModelContext<Self>,
765 ) -> Option<Task<Result<Entry>>> {
766 let project_path = project_path.into();
767 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
768 if self.is_local() {
769 Some(worktree.update(cx, |worktree, cx| {
770 worktree
771 .as_local_mut()
772 .unwrap()
773 .create_entry(project_path.path, is_directory, cx)
774 }))
775 } else {
776 let client = self.client.clone();
777 let project_id = self.remote_id().unwrap();
778 Some(cx.spawn_weak(|_, mut cx| async move {
779 let response = client
780 .request(proto::CreateProjectEntry {
781 worktree_id: project_path.worktree_id.to_proto(),
782 project_id,
783 path: project_path.path.as_os_str().as_bytes().to_vec(),
784 is_directory,
785 })
786 .await?;
787 let entry = response
788 .entry
789 .ok_or_else(|| anyhow!("missing entry in response"))?;
790 worktree
791 .update(&mut cx, |worktree, cx| {
792 worktree.as_remote().unwrap().insert_entry(
793 entry,
794 response.worktree_scan_id as usize,
795 cx,
796 )
797 })
798 .await
799 }))
800 }
801 }
802
803 pub fn copy_entry(
804 &mut self,
805 entry_id: ProjectEntryId,
806 new_path: impl Into<Arc<Path>>,
807 cx: &mut ModelContext<Self>,
808 ) -> Option<Task<Result<Entry>>> {
809 let worktree = self.worktree_for_entry(entry_id, cx)?;
810 let new_path = new_path.into();
811 if self.is_local() {
812 worktree.update(cx, |worktree, cx| {
813 worktree
814 .as_local_mut()
815 .unwrap()
816 .copy_entry(entry_id, new_path, cx)
817 })
818 } else {
819 let client = self.client.clone();
820 let project_id = self.remote_id().unwrap();
821
822 Some(cx.spawn_weak(|_, mut cx| async move {
823 let response = client
824 .request(proto::CopyProjectEntry {
825 project_id,
826 entry_id: entry_id.to_proto(),
827 new_path: new_path.as_os_str().as_bytes().to_vec(),
828 })
829 .await?;
830 let entry = response
831 .entry
832 .ok_or_else(|| anyhow!("missing entry in response"))?;
833 worktree
834 .update(&mut cx, |worktree, cx| {
835 worktree.as_remote().unwrap().insert_entry(
836 entry,
837 response.worktree_scan_id as usize,
838 cx,
839 )
840 })
841 .await
842 }))
843 }
844 }
845
846 pub fn rename_entry(
847 &mut self,
848 entry_id: ProjectEntryId,
849 new_path: impl Into<Arc<Path>>,
850 cx: &mut ModelContext<Self>,
851 ) -> Option<Task<Result<Entry>>> {
852 let worktree = self.worktree_for_entry(entry_id, cx)?;
853 let new_path = new_path.into();
854 if self.is_local() {
855 worktree.update(cx, |worktree, cx| {
856 worktree
857 .as_local_mut()
858 .unwrap()
859 .rename_entry(entry_id, new_path, cx)
860 })
861 } else {
862 let client = self.client.clone();
863 let project_id = self.remote_id().unwrap();
864
865 Some(cx.spawn_weak(|_, mut cx| async move {
866 let response = client
867 .request(proto::RenameProjectEntry {
868 project_id,
869 entry_id: entry_id.to_proto(),
870 new_path: new_path.as_os_str().as_bytes().to_vec(),
871 })
872 .await?;
873 let entry = response
874 .entry
875 .ok_or_else(|| anyhow!("missing entry in response"))?;
876 worktree
877 .update(&mut cx, |worktree, cx| {
878 worktree.as_remote().unwrap().insert_entry(
879 entry,
880 response.worktree_scan_id as usize,
881 cx,
882 )
883 })
884 .await
885 }))
886 }
887 }
888
889 pub fn delete_entry(
890 &mut self,
891 entry_id: ProjectEntryId,
892 cx: &mut ModelContext<Self>,
893 ) -> Option<Task<Result<()>>> {
894 let worktree = self.worktree_for_entry(entry_id, cx)?;
895 if self.is_local() {
896 worktree.update(cx, |worktree, cx| {
897 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
898 })
899 } else {
900 let client = self.client.clone();
901 let project_id = self.remote_id().unwrap();
902 Some(cx.spawn_weak(|_, mut cx| async move {
903 let response = client
904 .request(proto::DeleteProjectEntry {
905 project_id,
906 entry_id: entry_id.to_proto(),
907 })
908 .await?;
909 worktree
910 .update(&mut cx, move |worktree, cx| {
911 worktree.as_remote().unwrap().delete_entry(
912 entry_id,
913 response.worktree_scan_id as usize,
914 cx,
915 )
916 })
917 .await
918 }))
919 }
920 }
921
922 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
923 let project_id;
924 if let ProjectClientState::Local {
925 remote_id_rx,
926 is_shared,
927 ..
928 } = &mut self.client_state
929 {
930 if *is_shared {
931 return Task::ready(Ok(()));
932 }
933 *is_shared = true;
934 if let Some(id) = *remote_id_rx.borrow() {
935 project_id = id;
936 } else {
937 return Task::ready(Err(anyhow!("project hasn't been registered")));
938 }
939 } else {
940 return Task::ready(Err(anyhow!("can't share a remote project")));
941 };
942
943 for open_buffer in self.opened_buffers.values_mut() {
944 match open_buffer {
945 OpenBuffer::Strong(_) => {}
946 OpenBuffer::Weak(buffer) => {
947 if let Some(buffer) = buffer.upgrade(cx) {
948 *open_buffer = OpenBuffer::Strong(buffer);
949 }
950 }
951 OpenBuffer::Loading(_) => unreachable!(),
952 }
953 }
954
955 for worktree_handle in self.worktrees.iter_mut() {
956 match worktree_handle {
957 WorktreeHandle::Strong(_) => {}
958 WorktreeHandle::Weak(worktree) => {
959 if let Some(worktree) = worktree.upgrade(cx) {
960 *worktree_handle = WorktreeHandle::Strong(worktree);
961 }
962 }
963 }
964 }
965
966 let mut tasks = Vec::new();
967 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
968 worktree.update(cx, |worktree, cx| {
969 let worktree = worktree.as_local_mut().unwrap();
970 tasks.push(worktree.share(project_id, cx));
971 });
972 }
973
974 cx.spawn(|this, mut cx| async move {
975 for task in tasks {
976 task.await?;
977 }
978 this.update(&mut cx, |_, cx| cx.notify());
979 Ok(())
980 })
981 }
982
983 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
984 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
985 if !*is_shared {
986 return;
987 }
988
989 *is_shared = false;
990 self.collaborators.clear();
991 self.shared_buffers.clear();
992 for worktree_handle in self.worktrees.iter_mut() {
993 if let WorktreeHandle::Strong(worktree) = worktree_handle {
994 let is_visible = worktree.update(cx, |worktree, _| {
995 worktree.as_local_mut().unwrap().unshare();
996 worktree.is_visible()
997 });
998 if !is_visible {
999 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1000 }
1001 }
1002 }
1003
1004 for open_buffer in self.opened_buffers.values_mut() {
1005 match open_buffer {
1006 OpenBuffer::Strong(buffer) => {
1007 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1008 }
1009 _ => {}
1010 }
1011 }
1012
1013 cx.notify();
1014 } else {
1015 log::error!("attempted to unshare a remote project");
1016 }
1017 }
1018
1019 pub fn respond_to_join_request(
1020 &mut self,
1021 requester_id: u64,
1022 allow: bool,
1023 cx: &mut ModelContext<Self>,
1024 ) {
1025 if let Some(project_id) = self.remote_id() {
1026 let share = self.share(cx);
1027 let client = self.client.clone();
1028 cx.foreground()
1029 .spawn(async move {
1030 share.await?;
1031 client.send(proto::RespondToJoinProjectRequest {
1032 requester_id,
1033 project_id,
1034 allow,
1035 })
1036 })
1037 .detach_and_log_err(cx);
1038 }
1039 }
1040
1041 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1042 if let ProjectClientState::Remote {
1043 sharing_has_stopped,
1044 ..
1045 } = &mut self.client_state
1046 {
1047 *sharing_has_stopped = true;
1048 self.collaborators.clear();
1049 cx.notify();
1050 }
1051 }
1052
1053 pub fn is_read_only(&self) -> bool {
1054 match &self.client_state {
1055 ProjectClientState::Local { .. } => false,
1056 ProjectClientState::Remote {
1057 sharing_has_stopped,
1058 ..
1059 } => *sharing_has_stopped,
1060 }
1061 }
1062
1063 pub fn is_local(&self) -> bool {
1064 match &self.client_state {
1065 ProjectClientState::Local { .. } => true,
1066 ProjectClientState::Remote { .. } => false,
1067 }
1068 }
1069
1070 pub fn is_remote(&self) -> bool {
1071 !self.is_local()
1072 }
1073
1074 pub fn create_buffer(
1075 &mut self,
1076 text: &str,
1077 language: Option<Arc<Language>>,
1078 cx: &mut ModelContext<Self>,
1079 ) -> Result<ModelHandle<Buffer>> {
1080 if self.is_remote() {
1081 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1082 }
1083
1084 let buffer = cx.add_model(|cx| {
1085 Buffer::new(self.replica_id(), text, cx)
1086 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1087 });
1088 self.register_buffer(&buffer, cx)?;
1089 Ok(buffer)
1090 }
1091
1092 pub fn open_path(
1093 &mut self,
1094 path: impl Into<ProjectPath>,
1095 cx: &mut ModelContext<Self>,
1096 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1097 let task = self.open_buffer(path, cx);
1098 cx.spawn_weak(|_, cx| async move {
1099 let buffer = task.await?;
1100 let project_entry_id = buffer
1101 .read_with(&cx, |buffer, cx| {
1102 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1103 })
1104 .ok_or_else(|| anyhow!("no project entry"))?;
1105 Ok((project_entry_id, buffer.into()))
1106 })
1107 }
1108
1109 pub fn open_local_buffer(
1110 &mut self,
1111 abs_path: impl AsRef<Path>,
1112 cx: &mut ModelContext<Self>,
1113 ) -> Task<Result<ModelHandle<Buffer>>> {
1114 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1115 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1116 } else {
1117 Task::ready(Err(anyhow!("no such path")))
1118 }
1119 }
1120
1121 pub fn open_buffer(
1122 &mut self,
1123 path: impl Into<ProjectPath>,
1124 cx: &mut ModelContext<Self>,
1125 ) -> Task<Result<ModelHandle<Buffer>>> {
1126 let project_path = path.into();
1127 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1128 worktree
1129 } else {
1130 return Task::ready(Err(anyhow!("no such worktree")));
1131 };
1132
1133 // If there is already a buffer for the given path, then return it.
1134 let existing_buffer = self.get_open_buffer(&project_path, cx);
1135 if let Some(existing_buffer) = existing_buffer {
1136 return Task::ready(Ok(existing_buffer));
1137 }
1138
1139 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1140 // If the given path is already being loaded, then wait for that existing
1141 // task to complete and return the same buffer.
1142 hash_map::Entry::Occupied(e) => e.get().clone(),
1143
1144 // Otherwise, record the fact that this path is now being loaded.
1145 hash_map::Entry::Vacant(entry) => {
1146 let (mut tx, rx) = postage::watch::channel();
1147 entry.insert(rx.clone());
1148
1149 let load_buffer = if worktree.read(cx).is_local() {
1150 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1151 } else {
1152 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1153 };
1154
1155 cx.spawn(move |this, mut cx| async move {
1156 let load_result = load_buffer.await;
1157 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1158 // Record the fact that the buffer is no longer loading.
1159 this.loading_buffers.remove(&project_path);
1160 let buffer = load_result.map_err(Arc::new)?;
1161 Ok(buffer)
1162 }));
1163 })
1164 .detach();
1165 rx
1166 }
1167 };
1168
1169 cx.foreground().spawn(async move {
1170 loop {
1171 if let Some(result) = loading_watch.borrow().as_ref() {
1172 match result {
1173 Ok(buffer) => return Ok(buffer.clone()),
1174 Err(error) => return Err(anyhow!("{}", error)),
1175 }
1176 }
1177 loading_watch.next().await;
1178 }
1179 })
1180 }
1181
1182 fn open_local_buffer_internal(
1183 &mut self,
1184 path: &Arc<Path>,
1185 worktree: &ModelHandle<Worktree>,
1186 cx: &mut ModelContext<Self>,
1187 ) -> Task<Result<ModelHandle<Buffer>>> {
1188 let load_buffer = worktree.update(cx, |worktree, cx| {
1189 let worktree = worktree.as_local_mut().unwrap();
1190 worktree.load_buffer(path, cx)
1191 });
1192 cx.spawn(|this, mut cx| async move {
1193 let buffer = load_buffer.await?;
1194 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1195 Ok(buffer)
1196 })
1197 }
1198
1199 fn open_remote_buffer_internal(
1200 &mut self,
1201 path: &Arc<Path>,
1202 worktree: &ModelHandle<Worktree>,
1203 cx: &mut ModelContext<Self>,
1204 ) -> Task<Result<ModelHandle<Buffer>>> {
1205 let rpc = self.client.clone();
1206 let project_id = self.remote_id().unwrap();
1207 let remote_worktree_id = worktree.read(cx).id();
1208 let path = path.clone();
1209 let path_string = path.to_string_lossy().to_string();
1210 cx.spawn(|this, mut cx| async move {
1211 let response = rpc
1212 .request(proto::OpenBufferByPath {
1213 project_id,
1214 worktree_id: remote_worktree_id.to_proto(),
1215 path: path_string,
1216 })
1217 .await?;
1218 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1219 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1220 .await
1221 })
1222 }
1223
1224 fn open_local_buffer_via_lsp(
1225 &mut self,
1226 abs_path: lsp::Url,
1227 lsp_adapter: Arc<dyn LspAdapter>,
1228 lsp_server: Arc<LanguageServer>,
1229 cx: &mut ModelContext<Self>,
1230 ) -> Task<Result<ModelHandle<Buffer>>> {
1231 cx.spawn(|this, mut cx| async move {
1232 let abs_path = abs_path
1233 .to_file_path()
1234 .map_err(|_| anyhow!("can't convert URI to path"))?;
1235 let (worktree, relative_path) = if let Some(result) =
1236 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1237 {
1238 result
1239 } else {
1240 let worktree = this
1241 .update(&mut cx, |this, cx| {
1242 this.create_local_worktree(&abs_path, false, cx)
1243 })
1244 .await?;
1245 this.update(&mut cx, |this, cx| {
1246 this.language_servers.insert(
1247 (worktree.read(cx).id(), lsp_adapter.name()),
1248 (lsp_adapter, lsp_server),
1249 );
1250 });
1251 (worktree, PathBuf::new())
1252 };
1253
1254 let project_path = ProjectPath {
1255 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1256 path: relative_path.into(),
1257 };
1258 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1259 .await
1260 })
1261 }
1262
1263 pub fn open_buffer_by_id(
1264 &mut self,
1265 id: u64,
1266 cx: &mut ModelContext<Self>,
1267 ) -> Task<Result<ModelHandle<Buffer>>> {
1268 if let Some(buffer) = self.buffer_for_id(id, cx) {
1269 Task::ready(Ok(buffer))
1270 } else if self.is_local() {
1271 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1272 } else if let Some(project_id) = self.remote_id() {
1273 let request = self
1274 .client
1275 .request(proto::OpenBufferById { project_id, id });
1276 cx.spawn(|this, mut cx| async move {
1277 let buffer = request
1278 .await?
1279 .buffer
1280 .ok_or_else(|| anyhow!("invalid buffer"))?;
1281 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1282 .await
1283 })
1284 } else {
1285 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1286 }
1287 }
1288
1289 pub fn save_buffer_as(
1290 &mut self,
1291 buffer: ModelHandle<Buffer>,
1292 abs_path: PathBuf,
1293 cx: &mut ModelContext<Project>,
1294 ) -> Task<Result<()>> {
1295 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1296 let old_path =
1297 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1298 cx.spawn(|this, mut cx| async move {
1299 if let Some(old_path) = old_path {
1300 this.update(&mut cx, |this, cx| {
1301 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1302 });
1303 }
1304 let (worktree, path) = worktree_task.await?;
1305 worktree
1306 .update(&mut cx, |worktree, cx| {
1307 worktree
1308 .as_local_mut()
1309 .unwrap()
1310 .save_buffer_as(buffer.clone(), path, cx)
1311 })
1312 .await?;
1313 this.update(&mut cx, |this, cx| {
1314 this.assign_language_to_buffer(&buffer, cx);
1315 this.register_buffer_with_language_server(&buffer, cx);
1316 });
1317 Ok(())
1318 })
1319 }
1320
1321 pub fn get_open_buffer(
1322 &mut self,
1323 path: &ProjectPath,
1324 cx: &mut ModelContext<Self>,
1325 ) -> Option<ModelHandle<Buffer>> {
1326 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1327 self.opened_buffers.values().find_map(|buffer| {
1328 let buffer = buffer.upgrade(cx)?;
1329 let file = File::from_dyn(buffer.read(cx).file())?;
1330 if file.worktree == worktree && file.path() == &path.path {
1331 Some(buffer)
1332 } else {
1333 None
1334 }
1335 })
1336 }
1337
1338 fn register_buffer(
1339 &mut self,
1340 buffer: &ModelHandle<Buffer>,
1341 cx: &mut ModelContext<Self>,
1342 ) -> Result<()> {
1343 let remote_id = buffer.read(cx).remote_id();
1344 let open_buffer = if self.is_remote() || self.is_shared() {
1345 OpenBuffer::Strong(buffer.clone())
1346 } else {
1347 OpenBuffer::Weak(buffer.downgrade())
1348 };
1349
1350 match self.opened_buffers.insert(remote_id, open_buffer) {
1351 None => {}
1352 Some(OpenBuffer::Loading(operations)) => {
1353 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1354 }
1355 Some(OpenBuffer::Weak(existing_handle)) => {
1356 if existing_handle.upgrade(cx).is_some() {
1357 Err(anyhow!(
1358 "already registered buffer with remote id {}",
1359 remote_id
1360 ))?
1361 }
1362 }
1363 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1364 "already registered buffer with remote id {}",
1365 remote_id
1366 ))?,
1367 }
1368 cx.subscribe(buffer, |this, buffer, event, cx| {
1369 this.on_buffer_event(buffer, event, cx);
1370 })
1371 .detach();
1372
1373 self.assign_language_to_buffer(buffer, cx);
1374 self.register_buffer_with_language_server(buffer, cx);
1375 cx.observe_release(buffer, |this, buffer, cx| {
1376 if let Some(file) = File::from_dyn(buffer.file()) {
1377 if file.is_local() {
1378 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1379 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1380 server
1381 .notify::<lsp::notification::DidCloseTextDocument>(
1382 lsp::DidCloseTextDocumentParams {
1383 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1384 },
1385 )
1386 .log_err();
1387 }
1388 }
1389 }
1390 })
1391 .detach();
1392
1393 Ok(())
1394 }
1395
1396 fn register_buffer_with_language_server(
1397 &mut self,
1398 buffer_handle: &ModelHandle<Buffer>,
1399 cx: &mut ModelContext<Self>,
1400 ) {
1401 let buffer = buffer_handle.read(cx);
1402 let buffer_id = buffer.remote_id();
1403 if let Some(file) = File::from_dyn(buffer.file()) {
1404 if file.is_local() {
1405 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1406 let initial_snapshot = buffer.text_snapshot();
1407
1408 let mut language_server = None;
1409 let mut language_id = None;
1410 if let Some(language) = buffer.language() {
1411 let worktree_id = file.worktree_id(cx);
1412 if let Some(adapter) = language.lsp_adapter() {
1413 language_id = adapter.id_for_language(language.name().as_ref());
1414 language_server = self
1415 .language_servers
1416 .get(&(worktree_id, adapter.name()))
1417 .cloned();
1418 }
1419 }
1420
1421 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1422 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1423 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1424 .log_err();
1425 }
1426 }
1427
1428 if let Some((_, server)) = language_server {
1429 server
1430 .notify::<lsp::notification::DidOpenTextDocument>(
1431 lsp::DidOpenTextDocumentParams {
1432 text_document: lsp::TextDocumentItem::new(
1433 uri,
1434 language_id.unwrap_or_default(),
1435 0,
1436 initial_snapshot.text(),
1437 ),
1438 }
1439 .clone(),
1440 )
1441 .log_err();
1442 buffer_handle.update(cx, |buffer, cx| {
1443 buffer.set_completion_triggers(
1444 server
1445 .capabilities()
1446 .completion_provider
1447 .as_ref()
1448 .and_then(|provider| provider.trigger_characters.clone())
1449 .unwrap_or(Vec::new()),
1450 cx,
1451 )
1452 });
1453 self.buffer_snapshots
1454 .insert(buffer_id, vec![(0, initial_snapshot)]);
1455 }
1456 }
1457 }
1458 }
1459
1460 fn unregister_buffer_from_language_server(
1461 &mut self,
1462 buffer: &ModelHandle<Buffer>,
1463 old_path: PathBuf,
1464 cx: &mut ModelContext<Self>,
1465 ) {
1466 buffer.update(cx, |buffer, cx| {
1467 buffer.update_diagnostics(Default::default(), cx);
1468 self.buffer_snapshots.remove(&buffer.remote_id());
1469 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1470 language_server
1471 .notify::<lsp::notification::DidCloseTextDocument>(
1472 lsp::DidCloseTextDocumentParams {
1473 text_document: lsp::TextDocumentIdentifier::new(
1474 lsp::Url::from_file_path(old_path).unwrap(),
1475 ),
1476 },
1477 )
1478 .log_err();
1479 }
1480 });
1481 }
1482
1483 fn on_buffer_event(
1484 &mut self,
1485 buffer: ModelHandle<Buffer>,
1486 event: &BufferEvent,
1487 cx: &mut ModelContext<Self>,
1488 ) -> Option<()> {
1489 match event {
1490 BufferEvent::Operation(operation) => {
1491 if let Some(project_id) = self.shared_remote_id() {
1492 let request = self.client.request(proto::UpdateBuffer {
1493 project_id,
1494 buffer_id: buffer.read(cx).remote_id(),
1495 operations: vec![language::proto::serialize_operation(&operation)],
1496 });
1497 cx.background().spawn(request).detach_and_log_err(cx);
1498 }
1499 }
1500 BufferEvent::Edited { .. } => {
1501 let (_, language_server) = self
1502 .language_server_for_buffer(buffer.read(cx), cx)?
1503 .clone();
1504 let buffer = buffer.read(cx);
1505 let file = File::from_dyn(buffer.file())?;
1506 let abs_path = file.as_local()?.abs_path(cx);
1507 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1508 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1509 let (version, prev_snapshot) = buffer_snapshots.last()?;
1510 let next_snapshot = buffer.text_snapshot();
1511 let next_version = version + 1;
1512
1513 let content_changes = buffer
1514 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1515 .map(|edit| {
1516 let edit_start = edit.new.start.0;
1517 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1518 let new_text = next_snapshot
1519 .text_for_range(edit.new.start.1..edit.new.end.1)
1520 .collect();
1521 lsp::TextDocumentContentChangeEvent {
1522 range: Some(lsp::Range::new(
1523 point_to_lsp(edit_start),
1524 point_to_lsp(edit_end),
1525 )),
1526 range_length: None,
1527 text: new_text,
1528 }
1529 })
1530 .collect();
1531
1532 buffer_snapshots.push((next_version, next_snapshot));
1533
1534 language_server
1535 .notify::<lsp::notification::DidChangeTextDocument>(
1536 lsp::DidChangeTextDocumentParams {
1537 text_document: lsp::VersionedTextDocumentIdentifier::new(
1538 uri,
1539 next_version,
1540 ),
1541 content_changes,
1542 },
1543 )
1544 .log_err();
1545 }
1546 BufferEvent::Saved => {
1547 let file = File::from_dyn(buffer.read(cx).file())?;
1548 let worktree_id = file.worktree_id(cx);
1549 let abs_path = file.as_local()?.abs_path(cx);
1550 let text_document = lsp::TextDocumentIdentifier {
1551 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1552 };
1553
1554 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1555 server
1556 .notify::<lsp::notification::DidSaveTextDocument>(
1557 lsp::DidSaveTextDocumentParams {
1558 text_document: text_document.clone(),
1559 text: None,
1560 },
1561 )
1562 .log_err();
1563 }
1564 }
1565 _ => {}
1566 }
1567
1568 None
1569 }
1570
1571 fn language_servers_for_worktree(
1572 &self,
1573 worktree_id: WorktreeId,
1574 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1575 self.language_servers.iter().filter_map(
1576 move |((language_server_worktree_id, _), server)| {
1577 if *language_server_worktree_id == worktree_id {
1578 Some(server)
1579 } else {
1580 None
1581 }
1582 },
1583 )
1584 }
1585
1586 fn assign_language_to_buffer(
1587 &mut self,
1588 buffer: &ModelHandle<Buffer>,
1589 cx: &mut ModelContext<Self>,
1590 ) -> Option<()> {
1591 // If the buffer has a language, set it and start the language server if we haven't already.
1592 let full_path = buffer.read(cx).file()?.full_path(cx);
1593 let language = self.languages.select_language(&full_path)?;
1594 buffer.update(cx, |buffer, cx| {
1595 buffer.set_language(Some(language.clone()), cx);
1596 });
1597
1598 let file = File::from_dyn(buffer.read(cx).file())?;
1599 let worktree = file.worktree.read(cx).as_local()?;
1600 let worktree_id = worktree.id();
1601 let worktree_abs_path = worktree.abs_path().clone();
1602 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1603
1604 None
1605 }
1606
1607 fn start_language_server(
1608 &mut self,
1609 worktree_id: WorktreeId,
1610 worktree_path: Arc<Path>,
1611 language: Arc<Language>,
1612 cx: &mut ModelContext<Self>,
1613 ) {
1614 let adapter = if let Some(adapter) = language.lsp_adapter() {
1615 adapter
1616 } else {
1617 return;
1618 };
1619 let key = (worktree_id, adapter.name());
1620 self.started_language_servers
1621 .entry(key.clone())
1622 .or_insert_with(|| {
1623 let server_id = post_inc(&mut self.next_language_server_id);
1624 let language_server = self.languages.start_language_server(
1625 server_id,
1626 language.clone(),
1627 worktree_path,
1628 self.client.http_client(),
1629 cx,
1630 );
1631 cx.spawn_weak(|this, mut cx| async move {
1632 let language_server = language_server?.await.log_err()?;
1633 let language_server = language_server
1634 .initialize(adapter.initialization_options())
1635 .await
1636 .log_err()?;
1637 let this = this.upgrade(&cx)?;
1638 let disk_based_diagnostics_progress_token =
1639 adapter.disk_based_diagnostics_progress_token();
1640
1641 language_server
1642 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1643 let this = this.downgrade();
1644 let adapter = adapter.clone();
1645 move |params, mut cx| {
1646 if let Some(this) = this.upgrade(&cx) {
1647 this.update(&mut cx, |this, cx| {
1648 this.on_lsp_diagnostics_published(
1649 server_id,
1650 params,
1651 &adapter,
1652 disk_based_diagnostics_progress_token,
1653 cx,
1654 );
1655 });
1656 }
1657 }
1658 })
1659 .detach();
1660
1661 language_server
1662 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1663 let settings = this
1664 .read_with(&cx, |this, _| this.language_server_settings.clone());
1665 move |params, _| {
1666 let settings = settings.lock().clone();
1667 async move {
1668 Ok(params
1669 .items
1670 .into_iter()
1671 .map(|item| {
1672 if let Some(section) = &item.section {
1673 settings
1674 .get(section)
1675 .cloned()
1676 .unwrap_or(serde_json::Value::Null)
1677 } else {
1678 settings.clone()
1679 }
1680 })
1681 .collect())
1682 }
1683 }
1684 })
1685 .detach();
1686
1687 language_server
1688 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1689 let this = this.downgrade();
1690 let adapter = adapter.clone();
1691 let language_server = language_server.clone();
1692 move |params, cx| {
1693 Self::on_lsp_workspace_edit(
1694 this,
1695 params,
1696 server_id,
1697 adapter.clone(),
1698 language_server.clone(),
1699 cx,
1700 )
1701 }
1702 })
1703 .detach();
1704
1705 language_server
1706 .on_notification::<lsp::notification::Progress, _>({
1707 let this = this.downgrade();
1708 move |params, mut cx| {
1709 if let Some(this) = this.upgrade(&cx) {
1710 this.update(&mut cx, |this, cx| {
1711 this.on_lsp_progress(
1712 params,
1713 server_id,
1714 disk_based_diagnostics_progress_token,
1715 cx,
1716 );
1717 });
1718 }
1719 }
1720 })
1721 .detach();
1722
1723 this.update(&mut cx, |this, cx| {
1724 this.language_servers
1725 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1726 this.language_server_statuses.insert(
1727 server_id,
1728 LanguageServerStatus {
1729 name: language_server.name().to_string(),
1730 pending_work: Default::default(),
1731 pending_diagnostic_updates: 0,
1732 },
1733 );
1734 language_server
1735 .notify::<lsp::notification::DidChangeConfiguration>(
1736 lsp::DidChangeConfigurationParams {
1737 settings: this.language_server_settings.lock().clone(),
1738 },
1739 )
1740 .ok();
1741
1742 if let Some(project_id) = this.shared_remote_id() {
1743 this.client
1744 .send(proto::StartLanguageServer {
1745 project_id,
1746 server: Some(proto::LanguageServer {
1747 id: server_id as u64,
1748 name: language_server.name().to_string(),
1749 }),
1750 })
1751 .log_err();
1752 }
1753
1754 // Tell the language server about every open buffer in the worktree that matches the language.
1755 for buffer in this.opened_buffers.values() {
1756 if let Some(buffer_handle) = buffer.upgrade(cx) {
1757 let buffer = buffer_handle.read(cx);
1758 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1759 file
1760 } else {
1761 continue;
1762 };
1763 let language = if let Some(language) = buffer.language() {
1764 language
1765 } else {
1766 continue;
1767 };
1768 if file.worktree.read(cx).id() != key.0
1769 || language.lsp_adapter().map(|a| a.name())
1770 != Some(key.1.clone())
1771 {
1772 continue;
1773 }
1774
1775 let file = file.as_local()?;
1776 let versions = this
1777 .buffer_snapshots
1778 .entry(buffer.remote_id())
1779 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1780 let (version, initial_snapshot) = versions.last().unwrap();
1781 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1782 let language_id = adapter.id_for_language(language.name().as_ref());
1783 language_server
1784 .notify::<lsp::notification::DidOpenTextDocument>(
1785 lsp::DidOpenTextDocumentParams {
1786 text_document: lsp::TextDocumentItem::new(
1787 uri,
1788 language_id.unwrap_or_default(),
1789 *version,
1790 initial_snapshot.text(),
1791 ),
1792 },
1793 )
1794 .log_err()?;
1795 buffer_handle.update(cx, |buffer, cx| {
1796 buffer.set_completion_triggers(
1797 language_server
1798 .capabilities()
1799 .completion_provider
1800 .as_ref()
1801 .and_then(|provider| {
1802 provider.trigger_characters.clone()
1803 })
1804 .unwrap_or(Vec::new()),
1805 cx,
1806 )
1807 });
1808 }
1809 }
1810
1811 cx.notify();
1812 Some(())
1813 });
1814
1815 Some(language_server)
1816 })
1817 });
1818 }
1819
1820 pub fn restart_language_servers_for_buffers(
1821 &mut self,
1822 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1823 cx: &mut ModelContext<Self>,
1824 ) -> Option<()> {
1825 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1826 .into_iter()
1827 .filter_map(|buffer| {
1828 let file = File::from_dyn(buffer.read(cx).file())?;
1829 let worktree = file.worktree.read(cx).as_local()?;
1830 let worktree_id = worktree.id();
1831 let worktree_abs_path = worktree.abs_path().clone();
1832 let full_path = file.full_path(cx);
1833 Some((worktree_id, worktree_abs_path, full_path))
1834 })
1835 .collect();
1836 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1837 let language = self.languages.select_language(&full_path)?;
1838 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1839 }
1840
1841 None
1842 }
1843
1844 fn restart_language_server(
1845 &mut self,
1846 worktree_id: WorktreeId,
1847 worktree_path: Arc<Path>,
1848 language: Arc<Language>,
1849 cx: &mut ModelContext<Self>,
1850 ) {
1851 let adapter = if let Some(adapter) = language.lsp_adapter() {
1852 adapter
1853 } else {
1854 return;
1855 };
1856 let key = (worktree_id, adapter.name());
1857 let server_to_shutdown = self.language_servers.remove(&key);
1858 self.started_language_servers.remove(&key);
1859 server_to_shutdown
1860 .as_ref()
1861 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1862 cx.spawn_weak(|this, mut cx| async move {
1863 if let Some(this) = this.upgrade(&cx) {
1864 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1865 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1866 shutdown_task.await;
1867 }
1868 }
1869
1870 this.update(&mut cx, |this, cx| {
1871 this.start_language_server(worktree_id, worktree_path, language, cx);
1872 });
1873 }
1874 })
1875 .detach();
1876 }
1877
1878 fn on_lsp_diagnostics_published(
1879 &mut self,
1880 server_id: usize,
1881 mut params: lsp::PublishDiagnosticsParams,
1882 adapter: &Arc<dyn LspAdapter>,
1883 disk_based_diagnostics_progress_token: Option<&str>,
1884 cx: &mut ModelContext<Self>,
1885 ) {
1886 adapter.process_diagnostics(&mut params);
1887 if disk_based_diagnostics_progress_token.is_none() {
1888 self.disk_based_diagnostics_started(cx);
1889 self.broadcast_language_server_update(
1890 server_id,
1891 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1892 proto::LspDiskBasedDiagnosticsUpdating {},
1893 ),
1894 );
1895 }
1896 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1897 .log_err();
1898 if disk_based_diagnostics_progress_token.is_none() {
1899 self.disk_based_diagnostics_finished(cx);
1900 self.broadcast_language_server_update(
1901 server_id,
1902 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1903 proto::LspDiskBasedDiagnosticsUpdated {},
1904 ),
1905 );
1906 }
1907 }
1908
1909 fn on_lsp_progress(
1910 &mut self,
1911 progress: lsp::ProgressParams,
1912 server_id: usize,
1913 disk_based_diagnostics_progress_token: Option<&str>,
1914 cx: &mut ModelContext<Self>,
1915 ) {
1916 let token = match progress.token {
1917 lsp::NumberOrString::String(token) => token,
1918 lsp::NumberOrString::Number(token) => {
1919 log::info!("skipping numeric progress token {}", token);
1920 return;
1921 }
1922 };
1923 let progress = match progress.value {
1924 lsp::ProgressParamsValue::WorkDone(value) => value,
1925 };
1926 let language_server_status =
1927 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1928 status
1929 } else {
1930 return;
1931 };
1932 match progress {
1933 lsp::WorkDoneProgress::Begin(_) => {
1934 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1935 language_server_status.pending_diagnostic_updates += 1;
1936 if language_server_status.pending_diagnostic_updates == 1 {
1937 self.disk_based_diagnostics_started(cx);
1938 self.broadcast_language_server_update(
1939 server_id,
1940 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1941 proto::LspDiskBasedDiagnosticsUpdating {},
1942 ),
1943 );
1944 }
1945 } else {
1946 self.on_lsp_work_start(server_id, token.clone(), cx);
1947 self.broadcast_language_server_update(
1948 server_id,
1949 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1950 token,
1951 }),
1952 );
1953 }
1954 }
1955 lsp::WorkDoneProgress::Report(report) => {
1956 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1957 self.on_lsp_work_progress(
1958 server_id,
1959 token.clone(),
1960 LanguageServerProgress {
1961 message: report.message.clone(),
1962 percentage: report.percentage.map(|p| p as usize),
1963 last_update_at: Instant::now(),
1964 },
1965 cx,
1966 );
1967 self.broadcast_language_server_update(
1968 server_id,
1969 proto::update_language_server::Variant::WorkProgress(
1970 proto::LspWorkProgress {
1971 token,
1972 message: report.message,
1973 percentage: report.percentage.map(|p| p as u32),
1974 },
1975 ),
1976 );
1977 }
1978 }
1979 lsp::WorkDoneProgress::End(_) => {
1980 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1981 language_server_status.pending_diagnostic_updates -= 1;
1982 if language_server_status.pending_diagnostic_updates == 0 {
1983 self.disk_based_diagnostics_finished(cx);
1984 self.broadcast_language_server_update(
1985 server_id,
1986 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1987 proto::LspDiskBasedDiagnosticsUpdated {},
1988 ),
1989 );
1990 }
1991 } else {
1992 self.on_lsp_work_end(server_id, token.clone(), cx);
1993 self.broadcast_language_server_update(
1994 server_id,
1995 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1996 token,
1997 }),
1998 );
1999 }
2000 }
2001 }
2002 }
2003
2004 fn on_lsp_work_start(
2005 &mut self,
2006 language_server_id: usize,
2007 token: String,
2008 cx: &mut ModelContext<Self>,
2009 ) {
2010 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2011 status.pending_work.insert(
2012 token,
2013 LanguageServerProgress {
2014 message: None,
2015 percentage: None,
2016 last_update_at: Instant::now(),
2017 },
2018 );
2019 cx.notify();
2020 }
2021 }
2022
2023 fn on_lsp_work_progress(
2024 &mut self,
2025 language_server_id: usize,
2026 token: String,
2027 progress: LanguageServerProgress,
2028 cx: &mut ModelContext<Self>,
2029 ) {
2030 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2031 status.pending_work.insert(token, progress);
2032 cx.notify();
2033 }
2034 }
2035
2036 fn on_lsp_work_end(
2037 &mut self,
2038 language_server_id: usize,
2039 token: String,
2040 cx: &mut ModelContext<Self>,
2041 ) {
2042 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2043 status.pending_work.remove(&token);
2044 cx.notify();
2045 }
2046 }
2047
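    /// Handles a `workspace/applyEdit` request from a language server: the
    /// edit is applied locally and the resulting transaction is stored in
    /// `last_workspace_edits_by_language_server` so that the code action or
    /// command which triggered it can retrieve it.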
2048 async fn on_lsp_workspace_edit(
2049 this: WeakModelHandle<Self>,
2050 params: lsp::ApplyWorkspaceEditParams,
2051 server_id: usize,
2052 adapter: Arc<dyn LspAdapter>,
2053 language_server: Arc<LanguageServer>,
2054 mut cx: AsyncAppContext,
2055 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2056 let this = this
2057 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2059 let transaction = Self::deserialize_workspace_edit(
2060 this.clone(),
2061 params.edit,
2062 true,
2063 adapter.clone(),
2064 language_server.clone(),
2065 &mut cx,
2066 )
2067 .await
2068 .log_err();
2069 this.update(&mut cx, |this, _| {
2070 if let Some(transaction) = transaction {
2071 this.last_workspace_edits_by_language_server
2072 .insert(server_id, transaction);
2073 }
2074 });
2075 Ok(lsp::ApplyWorkspaceEditResponse {
2076 applied: true,
2077 failed_change: None,
2078 failure_reason: None,
2079 })
2080 }
2081
2082 fn broadcast_language_server_update(
2083 &self,
2084 language_server_id: usize,
2085 event: proto::update_language_server::Variant,
2086 ) {
2087 if let Some(project_id) = self.shared_remote_id() {
2088 self.client
2089 .send(proto::UpdateLanguageServer {
2090 project_id,
2091 language_server_id: language_server_id as u64,
2092 variant: Some(event),
2093 })
2094 .log_err();
2095 }
2096 }
2097
2098 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2099 for (_, server) in self.language_servers.values() {
2100 server
2101 .notify::<lsp::notification::DidChangeConfiguration>(
2102 lsp::DidChangeConfigurationParams {
2103 settings: settings.clone(),
2104 },
2105 )
2106 .ok();
2107 }
2108 *self.language_server_settings.lock() = settings;
2109 }
2110
2111 pub fn language_server_statuses(
2112 &self,
2113 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2114 self.language_server_statuses.values()
2115 }
2116
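    /// Converts a batch of LSP diagnostics into `DiagnosticEntry` values,
    /// grouping each primary diagnostic with its related information, and
    /// writes the result to the corresponding worktree and any open buffer.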
2117 pub fn update_diagnostics(
2118 &mut self,
2119 params: lsp::PublishDiagnosticsParams,
2120 disk_based_sources: &[&str],
2121 cx: &mut ModelContext<Self>,
2122 ) -> Result<()> {
2123 let abs_path = params
2124 .uri
2125 .to_file_path()
2126 .map_err(|_| anyhow!("URI is not a file"))?;
2127 let mut diagnostics = Vec::default();
2128 let mut primary_diagnostic_group_ids = HashMap::default();
2129 let mut sources_by_group_id = HashMap::default();
2130 let mut supporting_diagnostics = HashMap::default();
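        // First pass: assign a group id to each primary diagnostic and note which
        // entries are merely supporting information for a primary one. A second
        // pass below copies severity and "unnecessary" flags from the supporting
        // entries onto the non-primary members of each group.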
        for diagnostic in &params.diagnostics {
2132 let source = diagnostic.source.as_ref();
2133 let code = diagnostic.code.as_ref().map(|code| match code {
2134 lsp::NumberOrString::Number(code) => code.to_string(),
2135 lsp::NumberOrString::String(code) => code.clone(),
2136 });
2137 let range = range_from_lsp(diagnostic.range);
2138 let is_supporting = diagnostic
2139 .related_information
2140 .as_ref()
2141 .map_or(false, |infos| {
2142 infos.iter().any(|info| {
2143 primary_diagnostic_group_ids.contains_key(&(
2144 source,
2145 code.clone(),
2146 range_from_lsp(info.location.range),
2147 ))
2148 })
2149 });
2150
2151 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2152 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2153 });
2154
2155 if is_supporting {
2156 supporting_diagnostics.insert(
2157 (source, code.clone(), range),
2158 (diagnostic.severity, is_unnecessary),
2159 );
2160 } else {
2161 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2162 let is_disk_based = source.map_or(false, |source| {
2163 disk_based_sources.contains(&source.as_str())
2164 });
2165
2166 sources_by_group_id.insert(group_id, source);
2167 primary_diagnostic_group_ids
2168 .insert((source, code.clone(), range.clone()), group_id);
2169
2170 diagnostics.push(DiagnosticEntry {
2171 range,
2172 diagnostic: Diagnostic {
2173 code: code.clone(),
2174 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2175 message: diagnostic.message.clone(),
2176 group_id,
2177 is_primary: true,
2178 is_valid: true,
2179 is_disk_based,
2180 is_unnecessary,
2181 },
2182 });
2183 if let Some(infos) = &diagnostic.related_information {
2184 for info in infos {
2185 if info.location.uri == params.uri && !info.message.is_empty() {
2186 let range = range_from_lsp(info.location.range);
2187 diagnostics.push(DiagnosticEntry {
2188 range,
2189 diagnostic: Diagnostic {
2190 code: code.clone(),
2191 severity: DiagnosticSeverity::INFORMATION,
2192 message: info.message.clone(),
2193 group_id,
2194 is_primary: false,
2195 is_valid: true,
2196 is_disk_based,
2197 is_unnecessary: false,
2198 },
2199 });
2200 }
2201 }
2202 }
2203 }
2204 }
2205
2206 for entry in &mut diagnostics {
2207 let diagnostic = &mut entry.diagnostic;
2208 if !diagnostic.is_primary {
2209 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2210 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2211 source,
2212 diagnostic.code.clone(),
2213 entry.range.clone(),
2214 )) {
2215 if let Some(severity) = severity {
2216 diagnostic.severity = severity;
2217 }
2218 diagnostic.is_unnecessary = is_unnecessary;
2219 }
2220 }
2221 }
2222
2223 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2224 Ok(())
2225 }
2226
2227 pub fn update_diagnostic_entries(
2228 &mut self,
2229 abs_path: PathBuf,
2230 version: Option<i32>,
2231 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2232 cx: &mut ModelContext<Project>,
2233 ) -> Result<(), anyhow::Error> {
2234 let (worktree, relative_path) = self
2235 .find_local_worktree(&abs_path, cx)
2236 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2237 if !worktree.read(cx).is_visible() {
2238 return Ok(());
2239 }
2240
2241 let project_path = ProjectPath {
2242 worktree_id: worktree.read(cx).id(),
2243 path: relative_path.into(),
2244 };
2245 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2246 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2247 }
2248
2249 let updated = worktree.update(cx, |worktree, cx| {
2250 worktree
2251 .as_local_mut()
2252 .ok_or_else(|| anyhow!("not a local worktree"))?
2253 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2254 })?;
2255 if updated {
2256 cx.emit(Event::DiagnosticsUpdated(project_path));
2257 }
2258 Ok(())
2259 }
2260
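    /// Applies a set of diagnostics to an open buffer, translating the ranges
    /// of disk-based diagnostics through any unsaved edits and clipping (or
    /// minimally expanding) them so they are valid in the current snapshot.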
2261 fn update_buffer_diagnostics(
2262 &mut self,
2263 buffer: &ModelHandle<Buffer>,
2264 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2265 version: Option<i32>,
2266 cx: &mut ModelContext<Self>,
2267 ) -> Result<()> {
2268 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2269 Ordering::Equal
2270 .then_with(|| b.is_primary.cmp(&a.is_primary))
2271 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2272 .then_with(|| a.severity.cmp(&b.severity))
2273 .then_with(|| a.message.cmp(&b.message))
2274 }
2275
2276 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2277
2278 diagnostics.sort_unstable_by(|a, b| {
2279 Ordering::Equal
2280 .then_with(|| a.range.start.cmp(&b.range.start))
2281 .then_with(|| b.range.end.cmp(&a.range.end))
2282 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2283 });
2284
2285 let mut sanitized_diagnostics = Vec::new();
2286 let edits_since_save = Patch::new(
2287 snapshot
2288 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2289 .collect(),
2290 );
2291 for entry in diagnostics {
2292 let start;
2293 let end;
2294 if entry.diagnostic.is_disk_based {
2295 // Some diagnostics are based on files on disk instead of buffers'
2296 // current contents. Adjust these diagnostics' ranges to reflect
2297 // any unsaved edits.
2298 start = edits_since_save.old_to_new(entry.range.start);
2299 end = edits_since_save.old_to_new(entry.range.end);
2300 } else {
2301 start = entry.range.start;
2302 end = entry.range.end;
2303 }
2304
2305 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2306 ..snapshot.clip_point_utf16(end, Bias::Right);
2307
2308 // Expand empty ranges by one character
2309 if range.start == range.end {
2310 range.end.column += 1;
2311 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2312 if range.start == range.end && range.end.column > 0 {
2313 range.start.column -= 1;
2314 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2315 }
2316 }
2317
2318 sanitized_diagnostics.push(DiagnosticEntry {
2319 range,
2320 diagnostic: entry.diagnostic,
2321 });
2322 }
2323 drop(edits_since_save);
2324
2325 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2326 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2327 Ok(())
2328 }
2329
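    /// Reloads the given buffers from disk, discarding unsaved changes. Only
    /// dirty buffers are considered; remote buffers are reloaded via the
    /// host, local buffers directly. Returns a `ProjectTransaction` covering
    /// all affected buffers.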
2330 pub fn reload_buffers(
2331 &self,
2332 buffers: HashSet<ModelHandle<Buffer>>,
2333 push_to_history: bool,
2334 cx: &mut ModelContext<Self>,
2335 ) -> Task<Result<ProjectTransaction>> {
2336 let mut local_buffers = Vec::new();
2337 let mut remote_buffers = None;
2338 for buffer_handle in buffers {
2339 let buffer = buffer_handle.read(cx);
2340 if buffer.is_dirty() {
2341 if let Some(file) = File::from_dyn(buffer.file()) {
2342 if file.is_local() {
2343 local_buffers.push(buffer_handle);
2344 } else {
2345 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2346 }
2347 }
2348 }
2349 }
2350
2351 let remote_buffers = self.remote_id().zip(remote_buffers);
2352 let client = self.client.clone();
2353
2354 cx.spawn(|this, mut cx| async move {
2355 let mut project_transaction = ProjectTransaction::default();
2356
2357 if let Some((project_id, remote_buffers)) = remote_buffers {
2358 let response = client
2359 .request(proto::ReloadBuffers {
2360 project_id,
2361 buffer_ids: remote_buffers
2362 .iter()
2363 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2364 .collect(),
2365 })
2366 .await?
2367 .transaction
2368 .ok_or_else(|| anyhow!("missing transaction"))?;
2369 project_transaction = this
2370 .update(&mut cx, |this, cx| {
2371 this.deserialize_project_transaction(response, push_to_history, cx)
2372 })
2373 .await?;
2374 }
2375
2376 for buffer in local_buffers {
2377 let transaction = buffer
2378 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2379 .await?;
2380 buffer.update(&mut cx, |buffer, cx| {
2381 if let Some(transaction) = transaction {
2382 if !push_to_history {
2383 buffer.forget_transaction(transaction.id);
2384 }
2385 project_transaction.0.insert(cx.handle(), transaction);
2386 }
2387 });
2388 }
2389
2390 Ok(project_transaction)
2391 })
2392 }
2393
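    /// Formats the given buffers. Remote buffers are formatted via the host;
    /// local buffers use their language server's document (or range)
    /// formatting support, and the resulting edits are applied as a single
    /// transaction per buffer.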
2394 pub fn format(
2395 &self,
2396 buffers: HashSet<ModelHandle<Buffer>>,
2397 push_to_history: bool,
2398 cx: &mut ModelContext<Project>,
2399 ) -> Task<Result<ProjectTransaction>> {
2400 let mut local_buffers = Vec::new();
2401 let mut remote_buffers = None;
2402 for buffer_handle in buffers {
2403 let buffer = buffer_handle.read(cx);
2404 if let Some(file) = File::from_dyn(buffer.file()) {
2405 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2406 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2407 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2408 }
2409 } else {
2410 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2411 }
2412 } else {
2413 return Task::ready(Ok(Default::default()));
2414 }
2415 }
2416
2417 let remote_buffers = self.remote_id().zip(remote_buffers);
2418 let client = self.client.clone();
2419
2420 cx.spawn(|this, mut cx| async move {
2421 let mut project_transaction = ProjectTransaction::default();
2422
2423 if let Some((project_id, remote_buffers)) = remote_buffers {
2424 let response = client
2425 .request(proto::FormatBuffers {
2426 project_id,
2427 buffer_ids: remote_buffers
2428 .iter()
2429 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2430 .collect(),
2431 })
2432 .await?
2433 .transaction
2434 .ok_or_else(|| anyhow!("missing transaction"))?;
2435 project_transaction = this
2436 .update(&mut cx, |this, cx| {
2437 this.deserialize_project_transaction(response, push_to_history, cx)
2438 })
2439 .await?;
2440 }
2441
2442 for (buffer, buffer_abs_path, language_server) in local_buffers {
2443 let text_document = lsp::TextDocumentIdentifier::new(
2444 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2445 );
2446 let capabilities = &language_server.capabilities();
2447 let tab_size = cx.update(|cx| {
2448 let language_name = buffer.read(cx).language().map(|language| language.name());
2449 cx.global::<Settings>().tab_size(language_name.as_deref())
2450 });
2451 let lsp_edits = if capabilities
2452 .document_formatting_provider
2453 .as_ref()
2454 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2455 {
2456 language_server
2457 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2458 text_document,
2459 options: lsp::FormattingOptions {
2460 tab_size,
2461 insert_spaces: true,
2462 insert_final_newline: Some(true),
2463 ..Default::default()
2464 },
2465 work_done_progress_params: Default::default(),
2466 })
2467 .await?
2468 } else if capabilities
2469 .document_range_formatting_provider
2470 .as_ref()
2471 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2472 {
2473 let buffer_start = lsp::Position::new(0, 0);
2474 let buffer_end =
2475 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2476 language_server
2477 .request::<lsp::request::RangeFormatting>(
2478 lsp::DocumentRangeFormattingParams {
2479 text_document,
2480 range: lsp::Range::new(buffer_start, buffer_end),
2481 options: lsp::FormattingOptions {
                                    tab_size,
2483 insert_spaces: true,
2484 insert_final_newline: Some(true),
2485 ..Default::default()
2486 },
2487 work_done_progress_params: Default::default(),
2488 },
2489 )
2490 .await?
2491 } else {
2492 continue;
2493 };
2494
2495 if let Some(lsp_edits) = lsp_edits {
2496 let edits = this
2497 .update(&mut cx, |this, cx| {
2498 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2499 })
2500 .await?;
2501 buffer.update(&mut cx, |buffer, cx| {
2502 buffer.finalize_last_transaction();
2503 buffer.start_transaction();
2504 for (range, text) in edits {
2505 buffer.edit([(range, text)], cx);
2506 }
2507 if buffer.end_transaction(cx).is_some() {
2508 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2509 if !push_to_history {
2510 buffer.forget_transaction(transaction.id);
2511 }
2512 project_transaction.0.insert(cx.handle(), transaction);
2513 }
2514 });
2515 }
2516 }
2517
2518 Ok(project_transaction)
2519 })
2520 }
2521
2522 pub fn definition<T: ToPointUtf16>(
2523 &self,
2524 buffer: &ModelHandle<Buffer>,
2525 position: T,
2526 cx: &mut ModelContext<Self>,
2527 ) -> Task<Result<Vec<Location>>> {
2528 let position = position.to_point_utf16(buffer.read(cx));
2529 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2530 }
2531
2532 pub fn references<T: ToPointUtf16>(
2533 &self,
2534 buffer: &ModelHandle<Buffer>,
2535 position: T,
2536 cx: &mut ModelContext<Self>,
2537 ) -> Task<Result<Vec<Location>>> {
2538 let position = position.to_point_utf16(buffer.read(cx));
2539 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2540 }
2541
2542 pub fn document_highlights<T: ToPointUtf16>(
2543 &self,
2544 buffer: &ModelHandle<Buffer>,
2545 position: T,
2546 cx: &mut ModelContext<Self>,
2547 ) -> Task<Result<Vec<DocumentHighlight>>> {
2548 let position = position.to_point_utf16(buffer.read(cx));
2549
2550 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2551 }
2552
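    /// Queries every running language server (or the remote host) for
    /// workspace symbols matching `query`, resolving each result to a path
    /// within a worktree where possible.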
2553 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2554 if self.is_local() {
2555 let mut requests = Vec::new();
2556 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2557 let worktree_id = *worktree_id;
2558 if let Some(worktree) = self
2559 .worktree_for_id(worktree_id, cx)
2560 .and_then(|worktree| worktree.read(cx).as_local())
2561 {
2562 let lsp_adapter = lsp_adapter.clone();
2563 let worktree_abs_path = worktree.abs_path().clone();
2564 requests.push(
2565 language_server
2566 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2567 query: query.to_string(),
2568 ..Default::default()
2569 })
2570 .log_err()
2571 .map(move |response| {
2572 (
2573 lsp_adapter,
2574 worktree_id,
2575 worktree_abs_path,
2576 response.unwrap_or_default(),
2577 )
2578 }),
2579 );
2580 }
2581 }
2582
2583 cx.spawn_weak(|this, cx| async move {
2584 let responses = futures::future::join_all(requests).await;
2585 let this = if let Some(this) = this.upgrade(&cx) {
2586 this
2587 } else {
2588 return Ok(Default::default());
2589 };
2590 this.read_with(&cx, |this, cx| {
2591 let mut symbols = Vec::new();
2592 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2593 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2594 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2595 let mut worktree_id = source_worktree_id;
2596 let path;
2597 if let Some((worktree, rel_path)) =
2598 this.find_local_worktree(&abs_path, cx)
2599 {
2600 worktree_id = worktree.read(cx).id();
2601 path = rel_path;
2602 } else {
2603 path = relativize_path(&worktree_abs_path, &abs_path);
2604 }
2605
2606 let label = this
2607 .languages
2608 .select_language(&path)
2609 .and_then(|language| {
2610 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2611 })
2612 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2613 let signature = this.symbol_signature(worktree_id, &path);
2614
2615 Some(Symbol {
2616 source_worktree_id,
2617 worktree_id,
2618 language_server_name: adapter.name(),
2619 name: lsp_symbol.name,
2620 kind: lsp_symbol.kind,
2621 label,
2622 path,
2623 range: range_from_lsp(lsp_symbol.location.range),
2624 signature,
2625 })
2626 }));
2627 }
2628 Ok(symbols)
2629 })
2630 })
2631 } else if let Some(project_id) = self.remote_id() {
2632 let request = self.client.request(proto::GetProjectSymbols {
2633 project_id,
2634 query: query.to_string(),
2635 });
2636 cx.spawn_weak(|this, cx| async move {
2637 let response = request.await?;
2638 let mut symbols = Vec::new();
2639 if let Some(this) = this.upgrade(&cx) {
2640 this.read_with(&cx, |this, _| {
2641 symbols.extend(
2642 response
2643 .symbols
2644 .into_iter()
2645 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2646 );
2647 })
2648 }
2649 Ok(symbols)
2650 })
2651 } else {
2652 Task::ready(Ok(Default::default()))
2653 }
2654 }
2655
2656 pub fn open_buffer_for_symbol(
2657 &mut self,
2658 symbol: &Symbol,
2659 cx: &mut ModelContext<Self>,
2660 ) -> Task<Result<ModelHandle<Buffer>>> {
2661 if self.is_local() {
2662 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2663 symbol.source_worktree_id,
2664 symbol.language_server_name.clone(),
2665 )) {
2666 server.clone()
2667 } else {
2668 return Task::ready(Err(anyhow!(
2669 "language server for worktree and language not found"
2670 )));
2671 };
2672
2673 let worktree_abs_path = if let Some(worktree_abs_path) = self
2674 .worktree_for_id(symbol.worktree_id, cx)
2675 .and_then(|worktree| worktree.read(cx).as_local())
2676 .map(|local_worktree| local_worktree.abs_path())
2677 {
2678 worktree_abs_path
2679 } else {
2680 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2681 };
2682 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2683 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2684 uri
2685 } else {
2686 return Task::ready(Err(anyhow!("invalid symbol path")));
2687 };
2688
2689 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2690 } else if let Some(project_id) = self.remote_id() {
2691 let request = self.client.request(proto::OpenBufferForSymbol {
2692 project_id,
2693 symbol: Some(serialize_symbol(symbol)),
2694 });
2695 cx.spawn(|this, mut cx| async move {
2696 let response = request.await?;
2697 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2698 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2699 .await
2700 })
2701 } else {
2702 Task::ready(Err(anyhow!("project does not have a remote id")))
2703 }
2704 }
2705
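    /// Requests completions at the given position. For local buffers the
    /// request goes directly to the buffer's language server and the returned
    /// edits are validated against the current snapshot; for remote buffers
    /// it is proxied through the host.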
2706 pub fn completions<T: ToPointUtf16>(
2707 &self,
2708 source_buffer_handle: &ModelHandle<Buffer>,
2709 position: T,
2710 cx: &mut ModelContext<Self>,
2711 ) -> Task<Result<Vec<Completion>>> {
2712 let source_buffer_handle = source_buffer_handle.clone();
2713 let source_buffer = source_buffer_handle.read(cx);
2714 let buffer_id = source_buffer.remote_id();
2715 let language = source_buffer.language().cloned();
2716 let worktree;
2717 let buffer_abs_path;
2718 if let Some(file) = File::from_dyn(source_buffer.file()) {
2719 worktree = file.worktree.clone();
2720 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2721 } else {
2722 return Task::ready(Ok(Default::default()));
2723 };
2724
2725 let position = position.to_point_utf16(source_buffer);
2726 let anchor = source_buffer.anchor_after(position);
2727
2728 if worktree.read(cx).as_local().is_some() {
2729 let buffer_abs_path = buffer_abs_path.unwrap();
2730 let (_, lang_server) =
2731 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2732 server.clone()
2733 } else {
2734 return Task::ready(Ok(Default::default()));
2735 };
2736
2737 cx.spawn(|_, cx| async move {
2738 let completions = lang_server
2739 .request::<lsp::request::Completion>(lsp::CompletionParams {
2740 text_document_position: lsp::TextDocumentPositionParams::new(
2741 lsp::TextDocumentIdentifier::new(
2742 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2743 ),
2744 point_to_lsp(position),
2745 ),
2746 context: Default::default(),
2747 work_done_progress_params: Default::default(),
2748 partial_result_params: Default::default(),
2749 })
2750 .await
2751 .context("lsp completion request failed")?;
2752
2753 let completions = if let Some(completions) = completions {
2754 match completions {
2755 lsp::CompletionResponse::Array(completions) => completions,
2756 lsp::CompletionResponse::List(list) => list.items,
2757 }
2758 } else {
2759 Default::default()
2760 };
2761
2762 source_buffer_handle.read_with(&cx, |this, _| {
2763 let snapshot = this.snapshot();
2764 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2765 let mut range_for_token = None;
2766 Ok(completions
2767 .into_iter()
2768 .filter_map(|lsp_completion| {
2769 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2770 // If the language server provides a range to overwrite, then
2771 // check that the range is valid.
2772 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2773 let range = range_from_lsp(edit.range);
2774 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2775 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2776 if start != range.start || end != range.end {
2777 log::info!("completion out of expected range");
2778 return None;
2779 }
2780 (
2781 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2782 edit.new_text.clone(),
2783 )
2784 }
2785 // If the language server does not provide a range, then infer
2786 // the range based on the syntax tree.
2787 None => {
2788 if position != clipped_position {
2789 log::info!("completion out of expected range");
2790 return None;
2791 }
2792 let Range { start, end } = range_for_token
2793 .get_or_insert_with(|| {
2794 let offset = position.to_offset(&snapshot);
2795 snapshot
2796 .range_for_word_token_at(offset)
2797 .unwrap_or_else(|| offset..offset)
2798 })
2799 .clone();
2800 let text = lsp_completion
2801 .insert_text
2802 .as_ref()
2803 .unwrap_or(&lsp_completion.label)
2804 .clone();
2805 (
2806 snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text,
2808 )
2809 }
2810 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2811 log::info!("unsupported insert/replace completion");
2812 return None;
2813 }
2814 };
2815
2816 Some(Completion {
2817 old_range,
2818 new_text,
2819 label: language
2820 .as_ref()
2821 .and_then(|l| l.label_for_completion(&lsp_completion))
2822 .unwrap_or_else(|| {
2823 CodeLabel::plain(
2824 lsp_completion.label.clone(),
2825 lsp_completion.filter_text.as_deref(),
2826 )
2827 }),
2828 lsp_completion,
2829 })
2830 })
2831 .collect())
2832 })
2833 })
2834 } else if let Some(project_id) = self.remote_id() {
2835 let rpc = self.client.clone();
2836 let message = proto::GetCompletions {
2837 project_id,
2838 buffer_id,
2839 position: Some(language::proto::serialize_anchor(&anchor)),
2840 version: serialize_version(&source_buffer.version()),
2841 };
2842 cx.spawn_weak(|_, mut cx| async move {
2843 let response = rpc.request(message).await?;
2844
2845 source_buffer_handle
2846 .update(&mut cx, |buffer, _| {
2847 buffer.wait_for_version(deserialize_version(response.version))
2848 })
2849 .await;
2850
2851 response
2852 .completions
2853 .into_iter()
2854 .map(|completion| {
2855 language::proto::deserialize_completion(completion, language.as_ref())
2856 })
2857 .collect()
2858 })
2859 } else {
2860 Task::ready(Ok(Default::default()))
2861 }
2862 }
2863
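    /// Resolves the given completion with its language server and applies any
    /// additional text edits it returns, optionally pushing the resulting
    /// transaction onto the buffer's history.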
2864 pub fn apply_additional_edits_for_completion(
2865 &self,
2866 buffer_handle: ModelHandle<Buffer>,
2867 completion: Completion,
2868 push_to_history: bool,
2869 cx: &mut ModelContext<Self>,
2870 ) -> Task<Result<Option<Transaction>>> {
2871 let buffer = buffer_handle.read(cx);
2872 let buffer_id = buffer.remote_id();
2873
2874 if self.is_local() {
2875 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2876 {
2877 server.clone()
2878 } else {
2879 return Task::ready(Ok(Default::default()));
2880 };
2881
2882 cx.spawn(|this, mut cx| async move {
2883 let resolved_completion = lang_server
2884 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2885 .await?;
2886 if let Some(edits) = resolved_completion.additional_text_edits {
2887 let edits = this
2888 .update(&mut cx, |this, cx| {
2889 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2890 })
2891 .await?;
2892 buffer_handle.update(&mut cx, |buffer, cx| {
2893 buffer.finalize_last_transaction();
2894 buffer.start_transaction();
2895 for (range, text) in edits {
2896 buffer.edit([(range, text)], cx);
2897 }
2898 let transaction = if buffer.end_transaction(cx).is_some() {
2899 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2900 if !push_to_history {
2901 buffer.forget_transaction(transaction.id);
2902 }
2903 Some(transaction)
2904 } else {
2905 None
2906 };
2907 Ok(transaction)
2908 })
2909 } else {
2910 Ok(None)
2911 }
2912 })
2913 } else if let Some(project_id) = self.remote_id() {
2914 let client = self.client.clone();
2915 cx.spawn(|_, mut cx| async move {
2916 let response = client
2917 .request(proto::ApplyCompletionAdditionalEdits {
2918 project_id,
2919 buffer_id,
2920 completion: Some(language::proto::serialize_completion(&completion)),
2921 })
2922 .await?;
2923
2924 if let Some(transaction) = response.transaction {
2925 let transaction = language::proto::deserialize_transaction(transaction)?;
2926 buffer_handle
2927 .update(&mut cx, |buffer, _| {
2928 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2929 })
2930 .await;
2931 if push_to_history {
2932 buffer_handle.update(&mut cx, |buffer, _| {
2933 buffer.push_transaction(transaction.clone(), Instant::now());
2934 });
2935 }
2936 Ok(Some(transaction))
2937 } else {
2938 Ok(None)
2939 }
2940 })
2941 } else {
2942 Task::ready(Err(anyhow!("project does not have a remote id")))
2943 }
2944 }
2945
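    /// Requests code actions for the given range, forwarding the diagnostics
    /// that overlap it as context.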
2946 pub fn code_actions<T: Clone + ToOffset>(
2947 &self,
2948 buffer_handle: &ModelHandle<Buffer>,
2949 range: Range<T>,
2950 cx: &mut ModelContext<Self>,
2951 ) -> Task<Result<Vec<CodeAction>>> {
2952 let buffer_handle = buffer_handle.clone();
2953 let buffer = buffer_handle.read(cx);
2954 let snapshot = buffer.snapshot();
2955 let relevant_diagnostics = snapshot
2956 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2957 .map(|entry| entry.to_lsp_diagnostic_stub())
2958 .collect();
2959 let buffer_id = buffer.remote_id();
2960 let worktree;
2961 let buffer_abs_path;
2962 if let Some(file) = File::from_dyn(buffer.file()) {
2963 worktree = file.worktree.clone();
2964 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2965 } else {
2966 return Task::ready(Ok(Default::default()));
2967 };
2968 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2969
2970 if worktree.read(cx).as_local().is_some() {
2971 let buffer_abs_path = buffer_abs_path.unwrap();
2972 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2973 {
2974 server.clone()
2975 } else {
2976 return Task::ready(Ok(Default::default()));
2977 };
2978
2979 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2980 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2982 return Ok(Default::default());
2983 }
2984
2985 Ok(lang_server
2986 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2987 text_document: lsp::TextDocumentIdentifier::new(
2988 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2989 ),
2990 range: lsp_range,
2991 work_done_progress_params: Default::default(),
2992 partial_result_params: Default::default(),
2993 context: lsp::CodeActionContext {
2994 diagnostics: relevant_diagnostics,
2995 only: Some(vec![
2996 lsp::CodeActionKind::QUICKFIX,
2997 lsp::CodeActionKind::REFACTOR,
2998 lsp::CodeActionKind::REFACTOR_EXTRACT,
2999 lsp::CodeActionKind::SOURCE,
3000 ]),
3001 },
3002 })
3003 .await?
3004 .unwrap_or_default()
3005 .into_iter()
3006 .filter_map(|entry| {
3007 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3008 Some(CodeAction {
3009 range: range.clone(),
3010 lsp_action,
3011 })
3012 } else {
3013 None
3014 }
3015 })
3016 .collect())
3017 })
3018 } else if let Some(project_id) = self.remote_id() {
3019 let rpc = self.client.clone();
3020 let version = buffer.version();
3021 cx.spawn_weak(|_, mut cx| async move {
3022 let response = rpc
3023 .request(proto::GetCodeActions {
3024 project_id,
3025 buffer_id,
3026 start: Some(language::proto::serialize_anchor(&range.start)),
3027 end: Some(language::proto::serialize_anchor(&range.end)),
3028 version: serialize_version(&version),
3029 })
3030 .await?;
3031
3032 buffer_handle
3033 .update(&mut cx, |buffer, _| {
3034 buffer.wait_for_version(deserialize_version(response.version))
3035 })
3036 .await;
3037
3038 response
3039 .actions
3040 .into_iter()
3041 .map(language::proto::deserialize_code_action)
3042 .collect()
3043 })
3044 } else {
3045 Task::ready(Ok(Default::default()))
3046 }
3047 }
3048
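    /// Applies a code action. If the server supports lazy resolution the
    /// action is re-resolved (otherwise it is re-requested and matched by
    /// title); its workspace edit is then applied, or, for command-style
    /// actions, the command is executed and the workspace edits it produces
    /// are captured via `last_workspace_edits_by_language_server`.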
3049 pub fn apply_code_action(
3050 &self,
3051 buffer_handle: ModelHandle<Buffer>,
3052 mut action: CodeAction,
3053 push_to_history: bool,
3054 cx: &mut ModelContext<Self>,
3055 ) -> Task<Result<ProjectTransaction>> {
3056 if self.is_local() {
3057 let buffer = buffer_handle.read(cx);
3058 let (lsp_adapter, lang_server) =
3059 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3060 server.clone()
3061 } else {
3062 return Task::ready(Ok(Default::default()));
3063 };
3064 let range = action.range.to_point_utf16(buffer);
3065
3066 cx.spawn(|this, mut cx| async move {
3067 if let Some(lsp_range) = action
3068 .lsp_action
3069 .data
3070 .as_mut()
3071 .and_then(|d| d.get_mut("codeActionParams"))
3072 .and_then(|d| d.get_mut("range"))
3073 {
3074 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3075 action.lsp_action = lang_server
3076 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3077 .await?;
3078 } else {
3079 let actions = this
3080 .update(&mut cx, |this, cx| {
3081 this.code_actions(&buffer_handle, action.range, cx)
3082 })
3083 .await?;
3084 action.lsp_action = actions
3085 .into_iter()
3086 .find(|a| a.lsp_action.title == action.lsp_action.title)
3087 .ok_or_else(|| anyhow!("code action is outdated"))?
3088 .lsp_action;
3089 }
3090
3091 if let Some(edit) = action.lsp_action.edit {
3092 Self::deserialize_workspace_edit(
3093 this,
3094 edit,
3095 push_to_history,
3096 lsp_adapter,
3097 lang_server,
3098 &mut cx,
3099 )
3100 .await
3101 } else if let Some(command) = action.lsp_action.command {
3102 this.update(&mut cx, |this, _| {
3103 this.last_workspace_edits_by_language_server
3104 .remove(&lang_server.server_id());
3105 });
3106 lang_server
3107 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3108 command: command.command,
3109 arguments: command.arguments.unwrap_or_default(),
3110 ..Default::default()
3111 })
3112 .await?;
3113 Ok(this.update(&mut cx, |this, _| {
3114 this.last_workspace_edits_by_language_server
3115 .remove(&lang_server.server_id())
3116 .unwrap_or_default()
3117 }))
3118 } else {
3119 Ok(ProjectTransaction::default())
3120 }
3121 })
3122 } else if let Some(project_id) = self.remote_id() {
3123 let client = self.client.clone();
3124 let request = proto::ApplyCodeAction {
3125 project_id,
3126 buffer_id: buffer_handle.read(cx).remote_id(),
3127 action: Some(language::proto::serialize_code_action(&action)),
3128 };
3129 cx.spawn(|this, mut cx| async move {
3130 let response = client
3131 .request(request)
3132 .await?
3133 .transaction
3134 .ok_or_else(|| anyhow!("missing transaction"))?;
3135 this.update(&mut cx, |this, cx| {
3136 this.deserialize_project_transaction(response, push_to_history, cx)
3137 })
3138 .await
3139 })
3140 } else {
3141 Task::ready(Err(anyhow!("project does not have a remote id")))
3142 }
3143 }
3144
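    /// Applies an LSP workspace edit to the project: resource operations
    /// (create, rename, delete) are performed on the filesystem, and text
    /// document edits are applied to buffers, producing a
    /// `ProjectTransaction`.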
3145 async fn deserialize_workspace_edit(
3146 this: ModelHandle<Self>,
3147 edit: lsp::WorkspaceEdit,
3148 push_to_history: bool,
3149 lsp_adapter: Arc<dyn LspAdapter>,
3150 language_server: Arc<LanguageServer>,
3151 cx: &mut AsyncAppContext,
3152 ) -> Result<ProjectTransaction> {
3153 let fs = this.read_with(cx, |this, _| this.fs.clone());
3154 let mut operations = Vec::new();
3155 if let Some(document_changes) = edit.document_changes {
3156 match document_changes {
3157 lsp::DocumentChanges::Edits(edits) => {
3158 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3159 }
3160 lsp::DocumentChanges::Operations(ops) => operations = ops,
3161 }
3162 } else if let Some(changes) = edit.changes {
3163 operations.extend(changes.into_iter().map(|(uri, edits)| {
3164 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3165 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3166 uri,
3167 version: None,
3168 },
3169 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3170 })
3171 }));
3172 }
3173
3174 let mut project_transaction = ProjectTransaction::default();
3175 for operation in operations {
3176 match operation {
3177 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3178 let abs_path = op
3179 .uri
3180 .to_file_path()
3181 .map_err(|_| anyhow!("can't convert URI to path"))?;
3182
3183 if let Some(parent_path) = abs_path.parent() {
3184 fs.create_dir(parent_path).await?;
3185 }
3186 if abs_path.ends_with("/") {
3187 fs.create_dir(&abs_path).await?;
3188 } else {
3189 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3190 .await?;
3191 }
3192 }
3193 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3194 let source_abs_path = op
3195 .old_uri
3196 .to_file_path()
3197 .map_err(|_| anyhow!("can't convert URI to path"))?;
3198 let target_abs_path = op
3199 .new_uri
3200 .to_file_path()
3201 .map_err(|_| anyhow!("can't convert URI to path"))?;
3202 fs.rename(
3203 &source_abs_path,
3204 &target_abs_path,
3205 op.options.map(Into::into).unwrap_or_default(),
3206 )
3207 .await?;
3208 }
3209 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3210 let abs_path = op
3211 .uri
3212 .to_file_path()
3213 .map_err(|_| anyhow!("can't convert URI to path"))?;
3214 let options = op.options.map(Into::into).unwrap_or_default();
3215 if abs_path.ends_with("/") {
3216 fs.remove_dir(&abs_path, options).await?;
3217 } else {
3218 fs.remove_file(&abs_path, options).await?;
3219 }
3220 }
3221 lsp::DocumentChangeOperation::Edit(op) => {
3222 let buffer_to_edit = this
3223 .update(cx, |this, cx| {
3224 this.open_local_buffer_via_lsp(
3225 op.text_document.uri,
3226 lsp_adapter.clone(),
3227 language_server.clone(),
3228 cx,
3229 )
3230 })
3231 .await?;
3232
3233 let edits = this
3234 .update(cx, |this, cx| {
3235 let edits = op.edits.into_iter().map(|edit| match edit {
3236 lsp::OneOf::Left(edit) => edit,
3237 lsp::OneOf::Right(edit) => edit.text_edit,
3238 });
3239 this.edits_from_lsp(
3240 &buffer_to_edit,
3241 edits,
3242 op.text_document.version,
3243 cx,
3244 )
3245 })
3246 .await?;
3247
3248 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3249 buffer.finalize_last_transaction();
3250 buffer.start_transaction();
3251 for (range, text) in edits {
3252 buffer.edit([(range, text)], cx);
3253 }
3254 let transaction = if buffer.end_transaction(cx).is_some() {
3255 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3256 if !push_to_history {
3257 buffer.forget_transaction(transaction.id);
3258 }
3259 Some(transaction)
3260 } else {
3261 None
3262 };
3263
3264 transaction
3265 });
3266 if let Some(transaction) = transaction {
3267 project_transaction.0.insert(buffer_to_edit, transaction);
3268 }
3269 }
3270 }
3271 }
3272
3273 Ok(project_transaction)
3274 }
3275
3276 pub fn prepare_rename<T: ToPointUtf16>(
3277 &self,
3278 buffer: ModelHandle<Buffer>,
3279 position: T,
3280 cx: &mut ModelContext<Self>,
3281 ) -> Task<Result<Option<Range<Anchor>>>> {
3282 let position = position.to_point_utf16(buffer.read(cx));
3283 self.request_lsp(buffer, PrepareRename { position }, cx)
3284 }
3285
3286 pub fn perform_rename<T: ToPointUtf16>(
3287 &self,
3288 buffer: ModelHandle<Buffer>,
3289 position: T,
3290 new_name: String,
3291 push_to_history: bool,
3292 cx: &mut ModelContext<Self>,
3293 ) -> Task<Result<ProjectTransaction>> {
3294 let position = position.to_point_utf16(buffer.read(cx));
3295 self.request_lsp(
3296 buffer,
3297 PerformRename {
3298 position,
3299 new_name,
3300 push_to_history,
3301 },
3302 cx,
3303 )
3304 }
3305
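    /// Searches the project for `query`. Locally, worktree files are scanned
    /// in parallel to find candidate paths, the corresponding buffers are
    /// opened, and matches are collected per buffer; remotely, the search is
    /// delegated to the host.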
3306 pub fn search(
3307 &self,
3308 query: SearchQuery,
3309 cx: &mut ModelContext<Self>,
3310 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3311 if self.is_local() {
3312 let snapshots = self
3313 .visible_worktrees(cx)
3314 .filter_map(|tree| {
3315 let tree = tree.read(cx).as_local()?;
3316 Some(tree.snapshot())
3317 })
3318 .collect::<Vec<_>>();
3319
3320 let background = cx.background().clone();
3321 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3322 if path_count == 0 {
3323 return Task::ready(Ok(Default::default()));
3324 }
3325 let workers = background.num_cpus().min(path_count);
3326 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3327 cx.background()
3328 .spawn({
3329 let fs = self.fs.clone();
3330 let background = cx.background().clone();
3331 let query = query.clone();
3332 async move {
3333 let fs = &fs;
3334 let query = &query;
3335 let matching_paths_tx = &matching_paths_tx;
3336 let paths_per_worker = (path_count + workers - 1) / workers;
3337 let snapshots = &snapshots;
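                    // Partition the combined list of visible files across the workers:
                    // each worker scans a contiguous slice of path indices, which may
                    // span multiple worktree snapshots.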
3338 background
3339 .scoped(|scope| {
3340 for worker_ix in 0..workers {
3341 let worker_start_ix = worker_ix * paths_per_worker;
3342 let worker_end_ix = worker_start_ix + paths_per_worker;
3343 scope.spawn(async move {
3344 let mut snapshot_start_ix = 0;
3345 let mut abs_path = PathBuf::new();
3346 for snapshot in snapshots {
3347 let snapshot_end_ix =
3348 snapshot_start_ix + snapshot.visible_file_count();
3349 if worker_end_ix <= snapshot_start_ix {
3350 break;
3351 } else if worker_start_ix > snapshot_end_ix {
3352 snapshot_start_ix = snapshot_end_ix;
3353 continue;
3354 } else {
3355 let start_in_snapshot = worker_start_ix
3356 .saturating_sub(snapshot_start_ix);
3357 let end_in_snapshot =
3358 cmp::min(worker_end_ix, snapshot_end_ix)
3359 - snapshot_start_ix;
3360
3361 for entry in snapshot
3362 .files(false, start_in_snapshot)
3363 .take(end_in_snapshot - start_in_snapshot)
3364 {
3365 if matching_paths_tx.is_closed() {
3366 break;
3367 }
3368
3369 abs_path.clear();
3370 abs_path.push(&snapshot.abs_path());
3371 abs_path.push(&entry.path);
3372 let matches = if let Some(file) =
3373 fs.open_sync(&abs_path).await.log_err()
3374 {
3375 query.detect(file).unwrap_or(false)
3376 } else {
3377 false
3378 };
3379
3380 if matches {
3381 let project_path =
3382 (snapshot.id(), entry.path.clone());
3383 if matching_paths_tx
3384 .send(project_path)
3385 .await
3386 .is_err()
3387 {
3388 break;
3389 }
3390 }
3391 }
3392
3393 snapshot_start_ix = snapshot_end_ix;
3394 }
3395 }
3396 });
3397 }
3398 })
3399 .await;
3400 }
3401 })
3402 .detach();
3403
3404 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3405 let open_buffers = self
3406 .opened_buffers
3407 .values()
3408 .filter_map(|b| b.upgrade(cx))
3409 .collect::<HashSet<_>>();
3410 cx.spawn(|this, cx| async move {
3411 for buffer in &open_buffers {
3412 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3413 buffers_tx.send((buffer.clone(), snapshot)).await?;
3414 }
3415
3416 let open_buffers = Rc::new(RefCell::new(open_buffers));
3417 while let Some(project_path) = matching_paths_rx.next().await {
3418 if buffers_tx.is_closed() {
3419 break;
3420 }
3421
3422 let this = this.clone();
3423 let open_buffers = open_buffers.clone();
3424 let buffers_tx = buffers_tx.clone();
3425 cx.spawn(|mut cx| async move {
3426 if let Some(buffer) = this
3427 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3428 .await
3429 .log_err()
3430 {
3431 if open_buffers.borrow_mut().insert(buffer.clone()) {
3432 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3433 buffers_tx.send((buffer, snapshot)).await?;
3434 }
3435 }
3436
3437 Ok::<_, anyhow::Error>(())
3438 })
3439 .detach();
3440 }
3441
3442 Ok::<_, anyhow::Error>(())
3443 })
3444 .detach_and_log_err(cx);
3445
3446 let background = cx.background().clone();
3447 cx.background().spawn(async move {
3448 let query = &query;
3449 let mut matched_buffers = Vec::new();
3450 for _ in 0..workers {
3451 matched_buffers.push(HashMap::default());
3452 }
3453 background
3454 .scoped(|scope| {
3455 for worker_matched_buffers in matched_buffers.iter_mut() {
3456 let mut buffers_rx = buffers_rx.clone();
3457 scope.spawn(async move {
3458 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3459 let buffer_matches = query
3460 .search(snapshot.as_rope())
3461 .await
3462 .iter()
3463 .map(|range| {
3464 snapshot.anchor_before(range.start)
3465 ..snapshot.anchor_after(range.end)
3466 })
3467 .collect::<Vec<_>>();
3468 if !buffer_matches.is_empty() {
3469 worker_matched_buffers
3470 .insert(buffer.clone(), buffer_matches);
3471 }
3472 }
3473 });
3474 }
3475 })
3476 .await;
3477 Ok(matched_buffers.into_iter().flatten().collect())
3478 })
3479 } else if let Some(project_id) = self.remote_id() {
3480 let request = self.client.request(query.to_proto(project_id));
3481 cx.spawn(|this, mut cx| async move {
3482 let response = request.await?;
3483 let mut result = HashMap::default();
3484 for location in response.locations {
3485 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3486 let target_buffer = this
3487 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3488 .await?;
3489 let start = location
3490 .start
3491 .and_then(deserialize_anchor)
3492 .ok_or_else(|| anyhow!("missing target start"))?;
3493 let end = location
3494 .end
3495 .and_then(deserialize_anchor)
3496 .ok_or_else(|| anyhow!("missing target end"))?;
3497 result
3498 .entry(target_buffer)
                    .or_default()
3500 .push(start..end)
3501 }
3502 Ok(result)
3503 })
3504 } else {
3505 Task::ready(Ok(Default::default()))
3506 }
3507 }
3508
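    /// Sends a typed LSP request for the given buffer, either directly to the
    /// buffer's language server (local projects) or over RPC to the host
    /// (remote projects). Returns a default response if neither is available
    /// or the server lacks the required capability.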
3509 fn request_lsp<R: LspCommand>(
3510 &self,
3511 buffer_handle: ModelHandle<Buffer>,
3512 request: R,
3513 cx: &mut ModelContext<Self>,
3514 ) -> Task<Result<R::Response>>
3515 where
3516 <R::LspRequest as lsp::request::Request>::Result: Send,
3517 {
3518 let buffer = buffer_handle.read(cx);
3519 if self.is_local() {
3520 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3521 if let Some((file, (_, language_server))) =
3522 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3523 {
3524 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3525 return cx.spawn(|this, cx| async move {
3526 if !request.check_capabilities(&language_server.capabilities()) {
3527 return Ok(Default::default());
3528 }
3529
3530 let response = language_server
3531 .request::<R::LspRequest>(lsp_params)
3532 .await
3533 .context("lsp request failed")?;
3534 request
3535 .response_from_lsp(response, this, buffer_handle, cx)
3536 .await
3537 });
3538 }
3539 } else if let Some(project_id) = self.remote_id() {
3540 let rpc = self.client.clone();
3541 let message = request.to_proto(project_id, buffer);
3542 return cx.spawn(|this, cx| async move {
3543 let response = rpc.request(message).await?;
3544 request
3545 .response_from_proto(response, this, buffer_handle, cx)
3546 .await
3547 });
3548 }
3549 Task::ready(Ok(Default::default()))
3550 }
3551
3552 pub fn find_or_create_local_worktree(
3553 &mut self,
3554 abs_path: impl AsRef<Path>,
3555 visible: bool,
3556 cx: &mut ModelContext<Self>,
3557 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3558 let abs_path = abs_path.as_ref();
3559 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3560 Task::ready(Ok((tree.clone(), relative_path.into())))
3561 } else {
3562 let worktree = self.create_local_worktree(abs_path, visible, cx);
3563 cx.foreground()
3564 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3565 }
3566 }
3567
3568 pub fn find_local_worktree(
3569 &self,
3570 abs_path: &Path,
3571 cx: &AppContext,
3572 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3573 for tree in self.worktrees(cx) {
3574 if let Some(relative_path) = tree
3575 .read(cx)
3576 .as_local()
3577 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3578 {
3579 return Some((tree.clone(), relative_path.into()));
3580 }
3581 }
3582 None
3583 }
3584
3585 pub fn is_shared(&self) -> bool {
3586 match &self.client_state {
3587 ProjectClientState::Local { is_shared, .. } => *is_shared,
3588 ProjectClientState::Remote { .. } => false,
3589 }
3590 }
3591
3592 fn create_local_worktree(
3593 &mut self,
3594 abs_path: impl AsRef<Path>,
3595 visible: bool,
3596 cx: &mut ModelContext<Self>,
3597 ) -> Task<Result<ModelHandle<Worktree>>> {
3598 let fs = self.fs.clone();
3599 let client = self.client.clone();
3600 let next_entry_id = self.next_entry_id.clone();
3601 let path: Arc<Path> = abs_path.as_ref().into();
3602 let task = self
3603 .loading_local_worktrees
3604 .entry(path.clone())
3605 .or_insert_with(|| {
3606 cx.spawn(|project, mut cx| {
3607 async move {
3608 let worktree = Worktree::local(
3609 client.clone(),
3610 path.clone(),
3611 visible,
3612 fs,
3613 next_entry_id,
3614 &mut cx,
3615 )
3616 .await;
3617 project.update(&mut cx, |project, _| {
3618 project.loading_local_worktrees.remove(&path);
3619 });
3620 let worktree = worktree?;
3621
3622 let remote_project_id = project.update(&mut cx, |project, cx| {
3623 project.add_worktree(&worktree, cx);
3624 project.remote_id()
3625 });
3626
3627 if let Some(project_id) = remote_project_id {
3628 // Because sharing is async, we may have *unshared* the project by the time it completes,
3629 // in which case we need to register the worktree instead.
3630 loop {
3631 if project.read_with(&cx, |project, _| project.is_shared()) {
3632 if worktree
3633 .update(&mut cx, |worktree, cx| {
3634 worktree.as_local_mut().unwrap().share(project_id, cx)
3635 })
3636 .await
3637 .is_ok()
3638 {
3639 break;
3640 }
3641 } else {
3642 worktree
3643 .update(&mut cx, |worktree, cx| {
3644 worktree
3645 .as_local_mut()
3646 .unwrap()
3647 .register(project_id, cx)
3648 })
3649 .await?;
3650 break;
3651 }
3652 }
3653 }
3654
3655 Ok(worktree)
3656 }
                    .map_err(Arc::new)
3658 })
3659 .shared()
3660 })
3661 .clone();
3662 cx.foreground().spawn(async move {
3663 match task.await {
3664 Ok(worktree) => Ok(worktree),
3665 Err(err) => Err(anyhow!("{}", err)),
3666 }
3667 })
3668 }
3669
3670 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3671 self.worktrees.retain(|worktree| {
3672 if let Some(worktree) = worktree.upgrade(cx) {
3673 let id = worktree.read(cx).id();
3674 if id == id_to_remove {
3675 cx.emit(Event::WorktreeRemoved(id));
3676 false
3677 } else {
3678 true
3679 }
3680 } else {
3681 false
3682 }
3683 });
3684 cx.notify();
3685 }
3686
3687 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3688 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3689 if worktree.read(cx).is_local() {
3690 cx.subscribe(&worktree, |this, worktree, _, cx| {
3691 this.update_local_worktree_buffers(worktree, cx);
3692 })
3693 .detach();
3694 }
3695
3696 let push_strong_handle = {
3697 let worktree = worktree.read(cx);
3698 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3699 };
3700 if push_strong_handle {
3701 self.worktrees
3702 .push(WorktreeHandle::Strong(worktree.clone()));
3703 } else {
3704 cx.observe_release(&worktree, |this, _, cx| {
3705 this.worktrees
3706 .retain(|worktree| worktree.upgrade(cx).is_some());
3707 cx.notify();
3708 })
3709 .detach();
3710 self.worktrees
3711 .push(WorktreeHandle::Weak(worktree.downgrade()));
3712 }
3713 cx.emit(Event::WorktreeAdded);
3714 cx.notify();
3715 }
3716
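    /// After a local worktree changes, refreshes the `File` associated with
    /// each open buffer, notifies collaborators of the new file metadata, and
    /// re-registers renamed buffers with their language servers.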
3717 fn update_local_worktree_buffers(
3718 &mut self,
3719 worktree_handle: ModelHandle<Worktree>,
3720 cx: &mut ModelContext<Self>,
3721 ) {
3722 let snapshot = worktree_handle.read(cx).snapshot();
3723 let mut buffers_to_delete = Vec::new();
3724 let mut renamed_buffers = Vec::new();
3725 for (buffer_id, buffer) in &self.opened_buffers {
3726 if let Some(buffer) = buffer.upgrade(cx) {
3727 buffer.update(cx, |buffer, cx| {
3728 if let Some(old_file) = File::from_dyn(buffer.file()) {
3729 if old_file.worktree != worktree_handle {
3730 return;
3731 }
3732
3733 let new_file = if let Some(entry) = old_file
3734 .entry_id
3735 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3736 {
3737 File {
3738 is_local: true,
3739 entry_id: Some(entry.id),
3740 mtime: entry.mtime,
3741 path: entry.path.clone(),
3742 worktree: worktree_handle.clone(),
3743 }
3744 } else if let Some(entry) =
3745 snapshot.entry_for_path(old_file.path().as_ref())
3746 {
3747 File {
3748 is_local: true,
3749 entry_id: Some(entry.id),
3750 mtime: entry.mtime,
3751 path: entry.path.clone(),
3752 worktree: worktree_handle.clone(),
3753 }
3754 } else {
3755 File {
3756 is_local: true,
3757 entry_id: None,
3758 path: old_file.path().clone(),
3759 mtime: old_file.mtime(),
3760 worktree: worktree_handle.clone(),
3761 }
3762 };
3763
3764 let old_path = old_file.abs_path(cx);
3765 if new_file.abs_path(cx) != old_path {
3766 renamed_buffers.push((cx.handle(), old_path));
3767 }
3768
3769 if let Some(project_id) = self.shared_remote_id() {
3770 self.client
3771 .send(proto::UpdateBufferFile {
3772 project_id,
3773 buffer_id: *buffer_id as u64,
3774 file: Some(new_file.to_proto()),
3775 })
3776 .log_err();
3777 }
3778 buffer.file_updated(Box::new(new_file), cx).detach();
3779 }
3780 });
3781 } else {
3782 buffers_to_delete.push(*buffer_id);
3783 }
3784 }
3785
3786 for buffer_id in buffers_to_delete {
3787 self.opened_buffers.remove(&buffer_id);
3788 }
3789
3790 for (buffer, old_path) in renamed_buffers {
3791 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3792 self.assign_language_to_buffer(&buffer, cx);
3793 self.register_buffer_with_language_server(&buffer, cx);
3794 }
3795 }
3796
3797 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3798 let new_active_entry = entry.and_then(|project_path| {
3799 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3800 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3801 Some(entry.id)
3802 });
3803 if new_active_entry != self.active_entry {
3804 self.active_entry = new_active_entry;
3805 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3806 }
3807 }
3808
3809 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3810 self.language_server_statuses
3811 .values()
3812 .any(|status| status.pending_diagnostic_updates > 0)
3813 }
3814
3815 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3816 let mut summary = DiagnosticSummary::default();
3817 for (_, path_summary) in self.diagnostic_summaries(cx) {
3818 summary.error_count += path_summary.error_count;
3819 summary.warning_count += path_summary.warning_count;
3820 }
3821 summary
3822 }
3823
3824 pub fn diagnostic_summaries<'a>(
3825 &'a self,
3826 cx: &'a AppContext,
3827 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3828 self.worktrees(cx).flat_map(move |worktree| {
3829 let worktree = worktree.read(cx);
3830 let worktree_id = worktree.id();
3831 worktree
3832 .diagnostic_summaries()
3833 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3834 })
3835 }
3836
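    /// Emits `DiskBasedDiagnosticsStarted` when the total number of pending
    /// disk-based diagnostic updates across all language servers is exactly one,
    /// i.e. when the first such update begins.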
3837 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3838 if self
3839 .language_server_statuses
3840 .values()
3841 .map(|status| status.pending_diagnostic_updates)
3842 .sum::<isize>()
3843 == 1
3844 {
3845 cx.emit(Event::DiskBasedDiagnosticsStarted);
3846 }
3847 }
3848
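    /// Emits `DiskBasedDiagnosticsUpdated`, followed by
    /// `DiskBasedDiagnosticsFinished` once no language server has any pending
    /// disk-based diagnostic updates left.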
3849 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3850 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3851 if self
3852 .language_server_statuses
3853 .values()
3854 .map(|status| status.pending_diagnostic_updates)
3855 .sum::<isize>()
3856 == 0
3857 {
3858 cx.emit(Event::DiskBasedDiagnosticsFinished);
3859 }
3860 }
3861
3862 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3863 self.active_entry
3864 }
3865
3866 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3867 self.worktree_for_id(path.worktree_id, cx)?
3868 .read(cx)
3869 .entry_for_path(&path.path)
3870 .map(|entry| entry.id)
3871 }
3872
3873 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3874 let worktree = self.worktree_for_entry(entry_id, cx)?;
3875 let worktree = worktree.read(cx);
3876 let worktree_id = worktree.id();
3877 let path = worktree.entry_for_id(entry_id)?.path.clone();
3878 Some(ProjectPath { worktree_id, path })
3879 }
3880
3881 // RPC message handlers
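    //
    // Each handler below receives a deserialized message envelope, reads or
    // updates the project model, and, for request messages, returns the proto
    // response that is sent back to the requesting peer.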
3882
3883 async fn handle_request_join_project(
3884 this: ModelHandle<Self>,
3885 message: TypedEnvelope<proto::RequestJoinProject>,
3886 _: Arc<Client>,
3887 mut cx: AsyncAppContext,
3888 ) -> Result<()> {
3889 let user_id = message.payload.requester_id;
3890 if this.read_with(&cx, |project, _| {
3891 project.collaborators.values().any(|c| c.user.id == user_id)
3892 }) {
3893 this.update(&mut cx, |this, cx| {
3894 this.respond_to_join_request(user_id, true, cx)
3895 });
3896 } else {
3897 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3898 let user = user_store
3899 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3900 .await?;
3901 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3902 }
3903 Ok(())
3904 }
3905
3906 async fn handle_unregister_project(
3907 this: ModelHandle<Self>,
3908 _: TypedEnvelope<proto::UnregisterProject>,
3909 _: Arc<Client>,
3910 mut cx: AsyncAppContext,
3911 ) -> Result<()> {
3912 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3913 Ok(())
3914 }
3915
3916 async fn handle_project_unshared(
3917 this: ModelHandle<Self>,
3918 _: TypedEnvelope<proto::ProjectUnshared>,
3919 _: Arc<Client>,
3920 mut cx: AsyncAppContext,
3921 ) -> Result<()> {
3922 this.update(&mut cx, |this, cx| this.unshared(cx));
3923 Ok(())
3924 }
3925
3926 async fn handle_add_collaborator(
3927 this: ModelHandle<Self>,
3928 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3929 _: Arc<Client>,
3930 mut cx: AsyncAppContext,
3931 ) -> Result<()> {
3932 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3933 let collaborator = envelope
3934 .payload
3935 .collaborator
3936 .take()
3937 .ok_or_else(|| anyhow!("empty collaborator"))?;
3938
3939 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3940 this.update(&mut cx, |this, cx| {
3941 this.collaborators
3942 .insert(collaborator.peer_id, collaborator);
3943 cx.notify();
3944 });
3945
3946 Ok(())
3947 }
3948
3949 async fn handle_remove_collaborator(
3950 this: ModelHandle<Self>,
3951 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3952 _: Arc<Client>,
3953 mut cx: AsyncAppContext,
3954 ) -> Result<()> {
3955 this.update(&mut cx, |this, cx| {
3956 let peer_id = PeerId(envelope.payload.peer_id);
3957 let replica_id = this
3958 .collaborators
3959 .remove(&peer_id)
3960 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3961 .replica_id;
3962 for (_, buffer) in &this.opened_buffers {
3963 if let Some(buffer) = buffer.upgrade(cx) {
3964 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3965 }
3966 }
3967
3968 cx.emit(Event::CollaboratorLeft(peer_id));
3969 cx.notify();
3970 Ok(())
3971 })
3972 }
3973
3974 async fn handle_join_project_request_cancelled(
3975 this: ModelHandle<Self>,
3976 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3977 _: Arc<Client>,
3978 mut cx: AsyncAppContext,
3979 ) -> Result<()> {
3980 let user = this
3981 .update(&mut cx, |this, cx| {
3982 this.user_store.update(cx, |user_store, cx| {
3983 user_store.fetch_user(envelope.payload.requester_id, cx)
3984 })
3985 })
3986 .await?;
3987
3988 this.update(&mut cx, |_, cx| {
3989 cx.emit(Event::ContactCancelledJoinRequest(user));
3990 });
3991
3992 Ok(())
3993 }
3994
3995 async fn handle_register_worktree(
3996 this: ModelHandle<Self>,
3997 envelope: TypedEnvelope<proto::RegisterWorktree>,
3998 client: Arc<Client>,
3999 mut cx: AsyncAppContext,
4000 ) -> Result<()> {
4001 this.update(&mut cx, |this, cx| {
4002 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4003 let replica_id = this.replica_id();
4004 let worktree = proto::Worktree {
4005 id: envelope.payload.worktree_id,
4006 root_name: envelope.payload.root_name,
4007 entries: Default::default(),
4008 diagnostic_summaries: Default::default(),
4009 visible: envelope.payload.visible,
4010 scan_id: 0,
4011 };
4012 let (worktree, load_task) =
4013 Worktree::remote(remote_id, replica_id, worktree, client, cx);
4014 this.add_worktree(&worktree, cx);
4015 load_task.detach();
4016 Ok(())
4017 })
4018 }
4019
4020 async fn handle_unregister_worktree(
4021 this: ModelHandle<Self>,
4022 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4023 _: Arc<Client>,
4024 mut cx: AsyncAppContext,
4025 ) -> Result<()> {
4026 this.update(&mut cx, |this, cx| {
4027 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4028 this.remove_worktree(worktree_id, cx);
4029 Ok(())
4030 })
4031 }
4032
4033 async fn handle_update_worktree(
4034 this: ModelHandle<Self>,
4035 envelope: TypedEnvelope<proto::UpdateWorktree>,
4036 _: Arc<Client>,
4037 mut cx: AsyncAppContext,
4038 ) -> Result<()> {
4039 this.update(&mut cx, |this, cx| {
4040 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4041 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4042 worktree.update(cx, |worktree, _| {
4043 let worktree = worktree.as_remote_mut().unwrap();
4044 worktree.update_from_remote(envelope)
4045 })?;
4046 }
4047 Ok(())
4048 })
4049 }
4050
4051 async fn handle_create_project_entry(
4052 this: ModelHandle<Self>,
4053 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4054 _: Arc<Client>,
4055 mut cx: AsyncAppContext,
4056 ) -> Result<proto::ProjectEntryResponse> {
4057 let worktree = this.update(&mut cx, |this, cx| {
4058 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4059 this.worktree_for_id(worktree_id, cx)
4060 .ok_or_else(|| anyhow!("worktree not found"))
4061 })?;
4062 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4063 let entry = worktree
4064 .update(&mut cx, |worktree, cx| {
4065 let worktree = worktree.as_local_mut().unwrap();
4066 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4067 worktree.create_entry(path, envelope.payload.is_directory, cx)
4068 })
4069 .await?;
4070 Ok(proto::ProjectEntryResponse {
4071 entry: Some((&entry).into()),
4072 worktree_scan_id: worktree_scan_id as u64,
4073 })
4074 }
4075
4076 async fn handle_rename_project_entry(
4077 this: ModelHandle<Self>,
4078 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4079 _: Arc<Client>,
4080 mut cx: AsyncAppContext,
4081 ) -> Result<proto::ProjectEntryResponse> {
4082 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4083 let worktree = this.read_with(&cx, |this, cx| {
4084 this.worktree_for_entry(entry_id, cx)
4085 .ok_or_else(|| anyhow!("worktree not found"))
4086 })?;
4087 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4088 let entry = worktree
4089 .update(&mut cx, |worktree, cx| {
4090 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4091 worktree
4092 .as_local_mut()
4093 .unwrap()
4094 .rename_entry(entry_id, new_path, cx)
4095 .ok_or_else(|| anyhow!("invalid entry"))
4096 })?
4097 .await?;
4098 Ok(proto::ProjectEntryResponse {
4099 entry: Some((&entry).into()),
4100 worktree_scan_id: worktree_scan_id as u64,
4101 })
4102 }
4103
4104 async fn handle_copy_project_entry(
4105 this: ModelHandle<Self>,
4106 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4107 _: Arc<Client>,
4108 mut cx: AsyncAppContext,
4109 ) -> Result<proto::ProjectEntryResponse> {
4110 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4111 let worktree = this.read_with(&cx, |this, cx| {
4112 this.worktree_for_entry(entry_id, cx)
4113 .ok_or_else(|| anyhow!("worktree not found"))
4114 })?;
4115 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4116 let entry = worktree
4117 .update(&mut cx, |worktree, cx| {
4118 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4119 worktree
4120 .as_local_mut()
4121 .unwrap()
4122 .copy_entry(entry_id, new_path, cx)
4123 .ok_or_else(|| anyhow!("invalid entry"))
4124 })?
4125 .await?;
4126 Ok(proto::ProjectEntryResponse {
4127 entry: Some((&entry).into()),
4128 worktree_scan_id: worktree_scan_id as u64,
4129 })
4130 }
4131
4132 async fn handle_delete_project_entry(
4133 this: ModelHandle<Self>,
4134 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4135 _: Arc<Client>,
4136 mut cx: AsyncAppContext,
4137 ) -> Result<proto::ProjectEntryResponse> {
4138 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4139 let worktree = this.read_with(&cx, |this, cx| {
4140 this.worktree_for_entry(entry_id, cx)
4141 .ok_or_else(|| anyhow!("worktree not found"))
4142 })?;
4143 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4144 worktree
4145 .update(&mut cx, |worktree, cx| {
4146 worktree
4147 .as_local_mut()
4148 .unwrap()
4149 .delete_entry(entry_id, cx)
4150 .ok_or_else(|| anyhow!("invalid entry"))
4151 })?
4152 .await?;
4153 Ok(proto::ProjectEntryResponse {
4154 entry: None,
4155 worktree_scan_id: worktree_scan_id as u64,
4156 })
4157 }
4158
4159 async fn handle_update_diagnostic_summary(
4160 this: ModelHandle<Self>,
4161 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4162 _: Arc<Client>,
4163 mut cx: AsyncAppContext,
4164 ) -> Result<()> {
4165 this.update(&mut cx, |this, cx| {
4166 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4167 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4168 if let Some(summary) = envelope.payload.summary {
4169 let project_path = ProjectPath {
4170 worktree_id,
4171 path: Path::new(&summary.path).into(),
4172 };
4173 worktree.update(cx, |worktree, _| {
4174 worktree
4175 .as_remote_mut()
4176 .unwrap()
4177 .update_diagnostic_summary(project_path.path.clone(), &summary);
4178 });
4179 cx.emit(Event::DiagnosticsUpdated(project_path));
4180 }
4181 }
4182 Ok(())
4183 })
4184 }
4185
4186 async fn handle_start_language_server(
4187 this: ModelHandle<Self>,
4188 envelope: TypedEnvelope<proto::StartLanguageServer>,
4189 _: Arc<Client>,
4190 mut cx: AsyncAppContext,
4191 ) -> Result<()> {
4192 let server = envelope
4193 .payload
4194 .server
4195 .ok_or_else(|| anyhow!("invalid server"))?;
4196 this.update(&mut cx, |this, cx| {
4197 this.language_server_statuses.insert(
4198 server.id as usize,
4199 LanguageServerStatus {
4200 name: server.name,
4201 pending_work: Default::default(),
4202 pending_diagnostic_updates: 0,
4203 },
4204 );
4205 cx.notify();
4206 });
4207 Ok(())
4208 }
4209
4210 async fn handle_update_language_server(
4211 this: ModelHandle<Self>,
4212 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4213 _: Arc<Client>,
4214 mut cx: AsyncAppContext,
4215 ) -> Result<()> {
4216 let language_server_id = envelope.payload.language_server_id as usize;
4217 match envelope
4218 .payload
4219 .variant
4220 .ok_or_else(|| anyhow!("invalid variant"))?
4221 {
4222 proto::update_language_server::Variant::WorkStart(payload) => {
4223 this.update(&mut cx, |this, cx| {
4224 this.on_lsp_work_start(language_server_id, payload.token, cx);
4225 })
4226 }
4227 proto::update_language_server::Variant::WorkProgress(payload) => {
4228 this.update(&mut cx, |this, cx| {
4229 this.on_lsp_work_progress(
4230 language_server_id,
4231 payload.token,
4232 LanguageServerProgress {
4233 message: payload.message,
4234 percentage: payload.percentage.map(|p| p as usize),
4235 last_update_at: Instant::now(),
4236 },
4237 cx,
4238 );
4239 })
4240 }
4241 proto::update_language_server::Variant::WorkEnd(payload) => {
4242 this.update(&mut cx, |this, cx| {
4243 this.on_lsp_work_end(language_server_id, payload.token, cx);
4244 })
4245 }
4246 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4247 this.update(&mut cx, |this, cx| {
4248 this.disk_based_diagnostics_started(cx);
4249 })
4250 }
4251 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4252 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4253 }
4254 }
4255
4256 Ok(())
4257 }
4258
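    /// Applies buffer operations received from a peer. Operations for a buffer
    /// that is still loading are queued on its `OpenBuffer::Loading` entry and
    /// applied once loading completes; on remote projects, operations for an
    /// as-yet-unknown buffer id create that placeholder entry.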
4259 async fn handle_update_buffer(
4260 this: ModelHandle<Self>,
4261 envelope: TypedEnvelope<proto::UpdateBuffer>,
4262 _: Arc<Client>,
4263 mut cx: AsyncAppContext,
4264 ) -> Result<()> {
4265 this.update(&mut cx, |this, cx| {
4266 let payload = envelope.payload.clone();
4267 let buffer_id = payload.buffer_id;
4268 let ops = payload
4269 .operations
4270 .into_iter()
4271 .map(|op| language::proto::deserialize_operation(op))
4272 .collect::<Result<Vec<_>, _>>()?;
4273 let is_remote = this.is_remote();
4274 match this.opened_buffers.entry(buffer_id) {
4275 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4276 OpenBuffer::Strong(buffer) => {
4277 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4278 }
4279 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4280 OpenBuffer::Weak(_) => {}
4281 },
4282 hash_map::Entry::Vacant(e) => {
4283 assert!(
4284 is_remote,
4285 "received buffer update from {:?}",
4286 envelope.original_sender_id
4287 );
4288 e.insert(OpenBuffer::Loading(ops));
4289 }
4290 }
4291 Ok(())
4292 })
4293 }
4294
4295 async fn handle_update_buffer_file(
4296 this: ModelHandle<Self>,
4297 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4298 _: Arc<Client>,
4299 mut cx: AsyncAppContext,
4300 ) -> Result<()> {
4301 this.update(&mut cx, |this, cx| {
4302 let payload = envelope.payload.clone();
4303 let buffer_id = payload.buffer_id;
4304 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4305 let worktree = this
4306 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4307 .ok_or_else(|| anyhow!("no such worktree"))?;
4308 let file = File::from_proto(file, worktree.clone(), cx)?;
4309 let buffer = this
4310 .opened_buffers
4311 .get_mut(&buffer_id)
4312 .and_then(|b| b.upgrade(cx))
4313 .ok_or_else(|| anyhow!("no such buffer"))?;
4314 buffer.update(cx, |buffer, cx| {
4315 buffer.file_updated(Box::new(file), cx).detach();
4316 });
4317 Ok(())
4318 })
4319 }
4320
4321 async fn handle_save_buffer(
4322 this: ModelHandle<Self>,
4323 envelope: TypedEnvelope<proto::SaveBuffer>,
4324 _: Arc<Client>,
4325 mut cx: AsyncAppContext,
4326 ) -> Result<proto::BufferSaved> {
4327 let buffer_id = envelope.payload.buffer_id;
4328 let requested_version = deserialize_version(envelope.payload.version);
4329
4330 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4331 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4332 let buffer = this
4333 .opened_buffers
4334 .get(&buffer_id)
4335 .and_then(|buffer| buffer.upgrade(cx))
4336 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4337 Ok::<_, anyhow::Error>((project_id, buffer))
4338 })?;
4339 buffer
4340 .update(&mut cx, |buffer, _| {
4341 buffer.wait_for_version(requested_version)
4342 })
4343 .await;
4344
4345 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4346 Ok(proto::BufferSaved {
4347 project_id,
4348 buffer_id,
4349 version: serialize_version(&saved_version),
4350 mtime: Some(mtime.into()),
4351 })
4352 }
4353
4354 async fn handle_reload_buffers(
4355 this: ModelHandle<Self>,
4356 envelope: TypedEnvelope<proto::ReloadBuffers>,
4357 _: Arc<Client>,
4358 mut cx: AsyncAppContext,
4359 ) -> Result<proto::ReloadBuffersResponse> {
4360 let sender_id = envelope.original_sender_id()?;
4361 let reload = this.update(&mut cx, |this, cx| {
4362 let mut buffers = HashSet::default();
4363 for buffer_id in &envelope.payload.buffer_ids {
4364 buffers.insert(
4365 this.opened_buffers
4366 .get(buffer_id)
4367 .and_then(|buffer| buffer.upgrade(cx))
4368 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4369 );
4370 }
4371 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4372 })?;
4373
4374 let project_transaction = reload.await?;
4375 let project_transaction = this.update(&mut cx, |this, cx| {
4376 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4377 });
4378 Ok(proto::ReloadBuffersResponse {
4379 transaction: Some(project_transaction),
4380 })
4381 }
4382
4383 async fn handle_format_buffers(
4384 this: ModelHandle<Self>,
4385 envelope: TypedEnvelope<proto::FormatBuffers>,
4386 _: Arc<Client>,
4387 mut cx: AsyncAppContext,
4388 ) -> Result<proto::FormatBuffersResponse> {
4389 let sender_id = envelope.original_sender_id()?;
4390 let format = this.update(&mut cx, |this, cx| {
4391 let mut buffers = HashSet::default();
4392 for buffer_id in &envelope.payload.buffer_ids {
4393 buffers.insert(
4394 this.opened_buffers
4395 .get(buffer_id)
4396 .and_then(|buffer| buffer.upgrade(cx))
4397 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4398 );
4399 }
4400 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4401 })?;
4402
4403 let project_transaction = format.await?;
4404 let project_transaction = this.update(&mut cx, |this, cx| {
4405 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4406 });
4407 Ok(proto::FormatBuffersResponse {
4408 transaction: Some(project_transaction),
4409 })
4410 }
4411
4412 async fn handle_get_completions(
4413 this: ModelHandle<Self>,
4414 envelope: TypedEnvelope<proto::GetCompletions>,
4415 _: Arc<Client>,
4416 mut cx: AsyncAppContext,
4417 ) -> Result<proto::GetCompletionsResponse> {
4418 let position = envelope
4419 .payload
4420 .position
4421 .and_then(language::proto::deserialize_anchor)
4422 .ok_or_else(|| anyhow!("invalid position"))?;
4423 let version = deserialize_version(envelope.payload.version);
4424 let buffer = this.read_with(&cx, |this, cx| {
4425 this.opened_buffers
4426 .get(&envelope.payload.buffer_id)
4427 .and_then(|buffer| buffer.upgrade(cx))
4428 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4429 })?;
4430 buffer
4431 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4432 .await;
4433 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4434 let completions = this
4435 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4436 .await?;
4437
4438 Ok(proto::GetCompletionsResponse {
4439 completions: completions
4440 .iter()
4441 .map(language::proto::serialize_completion)
4442 .collect(),
4443 version: serialize_version(&version),
4444 })
4445 }
4446
4447 async fn handle_apply_additional_edits_for_completion(
4448 this: ModelHandle<Self>,
4449 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4450 _: Arc<Client>,
4451 mut cx: AsyncAppContext,
4452 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4453 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4454 let buffer = this
4455 .opened_buffers
4456 .get(&envelope.payload.buffer_id)
4457 .and_then(|buffer| buffer.upgrade(cx))
4458 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4459 let language = buffer.read(cx).language();
4460 let completion = language::proto::deserialize_completion(
4461 envelope
4462 .payload
4463 .completion
4464 .ok_or_else(|| anyhow!("invalid completion"))?,
4465 language,
4466 )?;
4467 Ok::<_, anyhow::Error>(
4468 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4469 )
4470 })?;
4471
4472 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4473 transaction: apply_additional_edits
4474 .await?
4475 .as_ref()
4476 .map(language::proto::serialize_transaction),
4477 })
4478 }
4479
4480 async fn handle_get_code_actions(
4481 this: ModelHandle<Self>,
4482 envelope: TypedEnvelope<proto::GetCodeActions>,
4483 _: Arc<Client>,
4484 mut cx: AsyncAppContext,
4485 ) -> Result<proto::GetCodeActionsResponse> {
4486 let start = envelope
4487 .payload
4488 .start
4489 .and_then(language::proto::deserialize_anchor)
4490 .ok_or_else(|| anyhow!("invalid start"))?;
4491 let end = envelope
4492 .payload
4493 .end
4494 .and_then(language::proto::deserialize_anchor)
4495 .ok_or_else(|| anyhow!("invalid end"))?;
4496 let buffer = this.update(&mut cx, |this, cx| {
4497 this.opened_buffers
4498 .get(&envelope.payload.buffer_id)
4499 .and_then(|buffer| buffer.upgrade(cx))
4500 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4501 })?;
4502 buffer
4503 .update(&mut cx, |buffer, _| {
4504 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4505 })
4506 .await;
4507
4508 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4509 let code_actions = this.update(&mut cx, |this, cx| {
4510 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4511 })?;
4512
4513 Ok(proto::GetCodeActionsResponse {
4514 actions: code_actions
4515 .await?
4516 .iter()
4517 .map(language::proto::serialize_code_action)
4518 .collect(),
4519 version: serialize_version(&version),
4520 })
4521 }
4522
4523 async fn handle_apply_code_action(
4524 this: ModelHandle<Self>,
4525 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4526 _: Arc<Client>,
4527 mut cx: AsyncAppContext,
4528 ) -> Result<proto::ApplyCodeActionResponse> {
4529 let sender_id = envelope.original_sender_id()?;
4530 let action = language::proto::deserialize_code_action(
4531 envelope
4532 .payload
4533 .action
4534 .ok_or_else(|| anyhow!("invalid action"))?,
4535 )?;
4536 let apply_code_action = this.update(&mut cx, |this, cx| {
4537 let buffer = this
4538 .opened_buffers
4539 .get(&envelope.payload.buffer_id)
4540 .and_then(|buffer| buffer.upgrade(cx))
4541 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4542 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4543 })?;
4544
4545 let project_transaction = apply_code_action.await?;
4546 let project_transaction = this.update(&mut cx, |this, cx| {
4547 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4548 });
4549 Ok(proto::ApplyCodeActionResponse {
4550 transaction: Some(project_transaction),
4551 })
4552 }
4553
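    /// Generic handler for LSP-backed requests forwarded by guests. Looks up the
    /// target buffer, reconstructs the typed request from its proto form, runs it
    /// through `request_lsp`, and serializes the response for the sender.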
4554 async fn handle_lsp_command<T: LspCommand>(
4555 this: ModelHandle<Self>,
4556 envelope: TypedEnvelope<T::ProtoRequest>,
4557 _: Arc<Client>,
4558 mut cx: AsyncAppContext,
4559 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4560 where
4561 <T::LspRequest as lsp::request::Request>::Result: Send,
4562 {
4563 let sender_id = envelope.original_sender_id()?;
4564 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4565 let buffer_handle = this.read_with(&cx, |this, _| {
4566 this.opened_buffers
4567 .get(&buffer_id)
4568 .and_then(|buffer| buffer.upgrade(&cx))
4569 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4570 })?;
4571 let request = T::from_proto(
4572 envelope.payload,
4573 this.clone(),
4574 buffer_handle.clone(),
4575 cx.clone(),
4576 )
4577 .await?;
4578 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4579 let response = this
4580 .update(&mut cx, |this, cx| {
4581 this.request_lsp(buffer_handle, request, cx)
4582 })
4583 .await?;
4584 this.update(&mut cx, |this, cx| {
4585 Ok(T::response_to_proto(
4586 response,
4587 this,
4588 sender_id,
4589 &buffer_version,
4590 cx,
4591 ))
4592 })
4593 }
4594
4595 async fn handle_get_project_symbols(
4596 this: ModelHandle<Self>,
4597 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4598 _: Arc<Client>,
4599 mut cx: AsyncAppContext,
4600 ) -> Result<proto::GetProjectSymbolsResponse> {
4601 let symbols = this
4602 .update(&mut cx, |this, cx| {
4603 this.symbols(&envelope.payload.query, cx)
4604 })
4605 .await?;
4606
4607 Ok(proto::GetProjectSymbolsResponse {
4608 symbols: symbols.iter().map(serialize_symbol).collect(),
4609 })
4610 }
4611
4612 async fn handle_search_project(
4613 this: ModelHandle<Self>,
4614 envelope: TypedEnvelope<proto::SearchProject>,
4615 _: Arc<Client>,
4616 mut cx: AsyncAppContext,
4617 ) -> Result<proto::SearchProjectResponse> {
4618 let peer_id = envelope.original_sender_id()?;
4619 let query = SearchQuery::from_proto(envelope.payload)?;
4620 let result = this
4621 .update(&mut cx, |this, cx| this.search(query, cx))
4622 .await?;
4623
4624 this.update(&mut cx, |this, cx| {
4625 let mut locations = Vec::new();
4626 for (buffer, ranges) in result {
4627 for range in ranges {
4628 let start = serialize_anchor(&range.start);
4629 let end = serialize_anchor(&range.end);
4630 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4631 locations.push(proto::Location {
4632 buffer: Some(buffer),
4633 start: Some(start),
4634 end: Some(end),
4635 });
4636 }
4637 }
4638 Ok(proto::SearchProjectResponse { locations })
4639 })
4640 }
4641
4642 async fn handle_open_buffer_for_symbol(
4643 this: ModelHandle<Self>,
4644 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4645 _: Arc<Client>,
4646 mut cx: AsyncAppContext,
4647 ) -> Result<proto::OpenBufferForSymbolResponse> {
4648 let peer_id = envelope.original_sender_id()?;
4649 let symbol = envelope
4650 .payload
4651 .symbol
4652 .ok_or_else(|| anyhow!("invalid symbol"))?;
4653 let symbol = this.read_with(&cx, |this, _| {
4654 let symbol = this.deserialize_symbol(symbol)?;
4655 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4656 if signature == symbol.signature {
4657 Ok(symbol)
4658 } else {
4659 Err(anyhow!("invalid symbol signature"))
4660 }
4661 })?;
4662 let buffer = this
4663 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4664 .await?;
4665
4666 Ok(proto::OpenBufferForSymbolResponse {
4667 buffer: Some(this.update(&mut cx, |this, cx| {
4668 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4669 })),
4670 })
4671 }
4672
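    /// Produces a SHA-256 digest of the worktree id, path, and this project's
    /// private nonce. Symbols are serialized with this signature, and it is
    /// re-checked in `handle_open_buffer_for_symbol` so a peer cannot request
    /// arbitrary paths by fabricating symbols.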
4673 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4674 let mut hasher = Sha256::new();
4675 hasher.update(worktree_id.to_proto().to_be_bytes());
4676 hasher.update(path.to_string_lossy().as_bytes());
4677 hasher.update(self.nonce.to_be_bytes());
4678 hasher.finalize().as_slice().try_into().unwrap()
4679 }
4680
4681 async fn handle_open_buffer_by_id(
4682 this: ModelHandle<Self>,
4683 envelope: TypedEnvelope<proto::OpenBufferById>,
4684 _: Arc<Client>,
4685 mut cx: AsyncAppContext,
4686 ) -> Result<proto::OpenBufferResponse> {
4687 let peer_id = envelope.original_sender_id()?;
4688 let buffer = this
4689 .update(&mut cx, |this, cx| {
4690 this.open_buffer_by_id(envelope.payload.id, cx)
4691 })
4692 .await?;
4693 this.update(&mut cx, |this, cx| {
4694 Ok(proto::OpenBufferResponse {
4695 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4696 })
4697 })
4698 }
4699
4700 async fn handle_open_buffer_by_path(
4701 this: ModelHandle<Self>,
4702 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4703 _: Arc<Client>,
4704 mut cx: AsyncAppContext,
4705 ) -> Result<proto::OpenBufferResponse> {
4706 let peer_id = envelope.original_sender_id()?;
4707 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4708 let open_buffer = this.update(&mut cx, |this, cx| {
4709 this.open_buffer(
4710 ProjectPath {
4711 worktree_id,
4712 path: PathBuf::from(envelope.payload.path).into(),
4713 },
4714 cx,
4715 )
4716 });
4717
4718 let buffer = open_buffer.await?;
4719 this.update(&mut cx, |this, cx| {
4720 Ok(proto::OpenBufferResponse {
4721 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4722 })
4723 })
4724 }
4725
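    /// Converts a `ProjectTransaction` into its proto form, serializing each
    /// affected buffer for the given peer along the way.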
4726 fn serialize_project_transaction_for_peer(
4727 &mut self,
4728 project_transaction: ProjectTransaction,
4729 peer_id: PeerId,
4730 cx: &AppContext,
4731 ) -> proto::ProjectTransaction {
4732 let mut serialized_transaction = proto::ProjectTransaction {
4733 buffers: Default::default(),
4734 transactions: Default::default(),
4735 };
4736 for (buffer, transaction) in project_transaction.0 {
4737 serialized_transaction
4738 .buffers
4739 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4740 serialized_transaction
4741 .transactions
4742 .push(language::proto::serialize_transaction(&transaction));
4743 }
4744 serialized_transaction
4745 }
4746
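    /// Rebuilds a `ProjectTransaction` received from a peer: resolves each proto
    /// buffer, waits for the edits referenced by its transaction to arrive, and
    /// optionally pushes the transactions onto the buffers' undo histories.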
4747 fn deserialize_project_transaction(
4748 &mut self,
4749 message: proto::ProjectTransaction,
4750 push_to_history: bool,
4751 cx: &mut ModelContext<Self>,
4752 ) -> Task<Result<ProjectTransaction>> {
4753 cx.spawn(|this, mut cx| async move {
4754 let mut project_transaction = ProjectTransaction::default();
4755 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4756 let buffer = this
4757 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4758 .await?;
4759 let transaction = language::proto::deserialize_transaction(transaction)?;
4760 project_transaction.0.insert(buffer, transaction);
4761 }
4762
4763 for (buffer, transaction) in &project_transaction.0 {
4764 buffer
4765 .update(&mut cx, |buffer, _| {
4766 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4767 })
4768 .await;
4769
4770 if push_to_history {
4771 buffer.update(&mut cx, |buffer, _| {
4772 buffer.push_transaction(transaction.clone(), Instant::now());
4773 });
4774 }
4775 }
4776
4777 Ok(project_transaction)
4778 })
4779 }
4780
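    /// Serializes a buffer for the given peer. The full buffer state is sent the
    /// first time a particular buffer is shared with that peer; afterwards only
    /// its id is sent.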
4781 fn serialize_buffer_for_peer(
4782 &mut self,
4783 buffer: &ModelHandle<Buffer>,
4784 peer_id: PeerId,
4785 cx: &AppContext,
4786 ) -> proto::Buffer {
4787 let buffer_id = buffer.read(cx).remote_id();
4788 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4789 if shared_buffers.insert(buffer_id) {
4790 proto::Buffer {
4791 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4792 }
4793 } else {
4794 proto::Buffer {
4795 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4796 }
4797 }
4798 }
4799
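    /// Resolves a proto buffer into a buffer handle. The id-only variant waits
    /// until a buffer with that id has been opened locally; the full-state
    /// variant constructs the buffer, attaches its file, and registers it with
    /// the project.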
4800 fn deserialize_buffer(
4801 &mut self,
4802 buffer: proto::Buffer,
4803 cx: &mut ModelContext<Self>,
4804 ) -> Task<Result<ModelHandle<Buffer>>> {
4805 let replica_id = self.replica_id();
4806
4807 let opened_buffer_tx = self.opened_buffer.0.clone();
4808 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4809 cx.spawn(|this, mut cx| async move {
4810 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4811 proto::buffer::Variant::Id(id) => {
4812 let buffer = loop {
4813 let buffer = this.read_with(&cx, |this, cx| {
4814 this.opened_buffers
4815 .get(&id)
4816 .and_then(|buffer| buffer.upgrade(cx))
4817 });
4818 if let Some(buffer) = buffer {
4819 break buffer;
4820 }
4821 opened_buffer_rx
4822 .next()
4823 .await
4824 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4825 };
4826 Ok(buffer)
4827 }
4828 proto::buffer::Variant::State(mut buffer) => {
4829 let mut buffer_worktree = None;
4830 let mut buffer_file = None;
4831 if let Some(file) = buffer.file.take() {
4832 this.read_with(&cx, |this, cx| {
4833 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4834 let worktree =
4835 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4836 anyhow!("no worktree found for id {}", file.worktree_id)
4837 })?;
4838 buffer_file =
4839 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4840 as Box<dyn language::File>);
4841 buffer_worktree = Some(worktree);
4842 Ok::<_, anyhow::Error>(())
4843 })?;
4844 }
4845
4846 let buffer = cx.add_model(|cx| {
4847 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4848 });
4849
4850 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4851
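                    // Wake any tasks waiting in the id-only branch above now
                    // that this buffer has been registered.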
4852 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4853 Ok(buffer)
4854 }
4855 }
4856 })
4857 }
4858
4859 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4860 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4861 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4862 let start = serialized_symbol
4863 .start
4864 .ok_or_else(|| anyhow!("invalid start"))?;
4865 let end = serialized_symbol
4866 .end
4867 .ok_or_else(|| anyhow!("invalid end"))?;
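        // Assumes the wire value is a valid discriminant of the symbol-kind type
        // expected by `label_for_symbol`; this mirrors the transmute in
        // `serialize_symbol` below.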
4868 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4869 let path = PathBuf::from(serialized_symbol.path);
4870 let language = self.languages.select_language(&path);
4871 Ok(Symbol {
4872 source_worktree_id,
4873 worktree_id,
4874 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4875 label: language
4876 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4877 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4878 name: serialized_symbol.name,
4879 path,
4880 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4881 kind,
4882 signature: serialized_symbol
4883 .signature
4884 .try_into()
4885 .map_err(|_| anyhow!("invalid signature"))?,
4886 })
4887 }
4888
4889 async fn handle_buffer_saved(
4890 this: ModelHandle<Self>,
4891 envelope: TypedEnvelope<proto::BufferSaved>,
4892 _: Arc<Client>,
4893 mut cx: AsyncAppContext,
4894 ) -> Result<()> {
4895 let version = deserialize_version(envelope.payload.version);
4896 let mtime = envelope
4897 .payload
4898 .mtime
4899 .ok_or_else(|| anyhow!("missing mtime"))?
4900 .into();
4901
4902 this.update(&mut cx, |this, cx| {
4903 let buffer = this
4904 .opened_buffers
4905 .get(&envelope.payload.buffer_id)
4906 .and_then(|buffer| buffer.upgrade(cx));
4907 if let Some(buffer) = buffer {
4908 buffer.update(cx, |buffer, cx| {
4909 buffer.did_save(version, mtime, None, cx);
4910 });
4911 }
4912 Ok(())
4913 })
4914 }
4915
4916 async fn handle_buffer_reloaded(
4917 this: ModelHandle<Self>,
4918 envelope: TypedEnvelope<proto::BufferReloaded>,
4919 _: Arc<Client>,
4920 mut cx: AsyncAppContext,
4921 ) -> Result<()> {
4922 let payload = envelope.payload.clone();
4923 let version = deserialize_version(payload.version);
4924 let mtime = payload
4925 .mtime
4926 .ok_or_else(|| anyhow!("missing mtime"))?
4927 .into();
4928 this.update(&mut cx, |this, cx| {
4929 let buffer = this
4930 .opened_buffers
4931 .get(&payload.buffer_id)
4932 .and_then(|buffer| buffer.upgrade(cx));
4933 if let Some(buffer) = buffer {
4934 buffer.update(cx, |buffer, cx| {
4935 buffer.did_reload(version, mtime, cx);
4936 });
4937 }
4938 Ok(())
4939 })
4940 }
4941
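    /// Fuzzy-matches `query` against the paths of all visible worktrees, running
    /// the matching on the background executor.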
4942 pub fn match_paths<'a>(
4943 &self,
4944 query: &'a str,
4945 include_ignored: bool,
4946 smart_case: bool,
4947 max_results: usize,
4948 cancel_flag: &'a AtomicBool,
4949 cx: &AppContext,
4950 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4951 let worktrees = self
4952 .worktrees(cx)
4953 .filter(|worktree| worktree.read(cx).is_visible())
4954 .collect::<Vec<_>>();
4955 let include_root_name = worktrees.len() > 1;
4956 let candidate_sets = worktrees
4957 .into_iter()
4958 .map(|worktree| CandidateSet {
4959 snapshot: worktree.read(cx).snapshot(),
4960 include_ignored,
4961 include_root_name,
4962 })
4963 .collect::<Vec<_>>();
4964
4965 let background = cx.background().clone();
4966 async move {
4967 fuzzy::match_paths(
4968 candidate_sets.as_slice(),
4969 query,
4970 smart_case,
4971 max_results,
4972 cancel_flag,
4973 background,
4974 )
4975 .await
4976 }
4977 }
4978
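    /// Converts LSP text edits into anchored buffer edits against the snapshot
    /// corresponding to the given LSP document version, coalescing adjacent edits
    /// and diffing multi-line replacements so that anchors within unchanged text
    /// keep their positions.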
4979 fn edits_from_lsp(
4980 &mut self,
4981 buffer: &ModelHandle<Buffer>,
4982 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4983 version: Option<i32>,
4984 cx: &mut ModelContext<Self>,
4985 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4986 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4987 cx.background().spawn(async move {
4988 let snapshot = snapshot?;
4989 let mut lsp_edits = lsp_edits
4990 .into_iter()
4991 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4992 .peekable();
4993
4994 let mut edits = Vec::new();
4995 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4996 // Combine any LSP edits that are adjacent.
4997 //
4998 // Also, combine LSP edits that are separated from each other by only
4999 // a newline. This is important because for some code actions,
5000                // rust-analyzer rewrites the entire buffer via a series of edits that
5001 // are separated by unchanged newline characters.
5002 //
5003 // In order for the diffing logic below to work properly, any edits that
5004 // cancel each other out must be combined into one.
5005 while let Some((next_range, next_text)) = lsp_edits.peek() {
5006 if next_range.start > range.end {
5007 if next_range.start.row > range.end.row + 1
5008 || next_range.start.column > 0
5009 || snapshot.clip_point_utf16(
5010 PointUtf16::new(range.end.row, u32::MAX),
5011 Bias::Left,
5012 ) > range.end
5013 {
5014 break;
5015 }
5016 new_text.push('\n');
5017 }
5018 range.end = next_range.end;
5019 new_text.push_str(&next_text);
5020 lsp_edits.next();
5021 }
5022
5023 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5024 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5025 {
5026 return Err(anyhow!("invalid edits received from language server"));
5027 }
5028
5029 // For multiline edits, perform a diff of the old and new text so that
5030 // we can identify the changes more precisely, preserving the locations
5031 // of any anchors positioned in the unchanged regions.
5032 if range.end.row > range.start.row {
5033 let mut offset = range.start.to_offset(&snapshot);
5034 let old_text = snapshot.text_for_range(range).collect::<String>();
5035
5036 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5037 let mut moved_since_edit = true;
5038 for change in diff.iter_all_changes() {
5039 let tag = change.tag();
5040 let value = change.value();
5041 match tag {
5042 ChangeTag::Equal => {
5043 offset += value.len();
5044 moved_since_edit = true;
5045 }
5046 ChangeTag::Delete => {
5047 let start = snapshot.anchor_after(offset);
5048 let end = snapshot.anchor_before(offset + value.len());
5049 if moved_since_edit {
5050 edits.push((start..end, String::new()));
5051 } else {
5052 edits.last_mut().unwrap().0.end = end;
5053 }
5054 offset += value.len();
5055 moved_since_edit = false;
5056 }
5057 ChangeTag::Insert => {
5058 if moved_since_edit {
5059 let anchor = snapshot.anchor_after(offset);
5060 edits.push((anchor.clone()..anchor, value.to_string()));
5061 } else {
5062 edits.last_mut().unwrap().1.push_str(value);
5063 }
5064 moved_since_edit = false;
5065 }
5066 }
5067 }
5068 } else if range.end == range.start {
5069 let anchor = snapshot.anchor_after(range.start);
5070 edits.push((anchor.clone()..anchor, new_text));
5071 } else {
5072 let edit_start = snapshot.anchor_after(range.start);
5073 let edit_end = snapshot.anchor_before(range.end);
5074 edits.push((edit_start..edit_end, new_text));
5075 }
5076 }
5077
5078 Ok(edits)
5079 })
5080 }
5081
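    /// Returns the buffer snapshot corresponding to the given LSP document
    /// version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    /// older than it. If no version is given, the buffer's current text snapshot
    /// is returned.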
5082 fn buffer_snapshot_for_lsp_version(
5083 &mut self,
5084 buffer: &ModelHandle<Buffer>,
5085 version: Option<i32>,
5086 cx: &AppContext,
5087 ) -> Result<TextBufferSnapshot> {
5088 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5089
5090 if let Some(version) = version {
5091 let buffer_id = buffer.read(cx).remote_id();
5092 let snapshots = self
5093 .buffer_snapshots
5094 .get_mut(&buffer_id)
5095 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5096 let mut found_snapshot = None;
5097 snapshots.retain(|(snapshot_version, snapshot)| {
5098 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5099 false
5100 } else {
5101 if *snapshot_version == version {
5102 found_snapshot = Some(snapshot.clone());
5103 }
5104 true
5105 }
5106 });
5107
5108 found_snapshot.ok_or_else(|| {
5109 anyhow!(
5110 "snapshot not found for buffer {} at version {}",
5111 buffer_id,
5112 version
5113 )
5114 })
5115 } else {
5116            Ok(buffer.read(cx).text_snapshot())
5117 }
5118 }
5119
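    /// Returns the LSP adapter and language server registered for the buffer's
    /// worktree and language, if both are known.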
5120 fn language_server_for_buffer(
5121 &self,
5122 buffer: &Buffer,
5123 cx: &AppContext,
5124 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5125 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5126 let worktree_id = file.worktree_id(cx);
5127 self.language_servers
5128 .get(&(worktree_id, language.lsp_adapter()?.name()))
5129 } else {
5130 None
5131 }
5132 }
5133}
5134
5135impl WorktreeHandle {
5136 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5137 match self {
5138 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5139 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5140 }
5141 }
5142}
5143
5144impl OpenBuffer {
5145 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5146 match self {
5147 OpenBuffer::Strong(handle) => Some(handle.clone()),
5148 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5149 OpenBuffer::Loading(_) => None,
5150 }
5151 }
5152}
5153
5154struct CandidateSet {
5155 snapshot: Snapshot,
5156 include_ignored: bool,
5157 include_root_name: bool,
5158}
5159
5160impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5161 type Candidates = CandidateSetIter<'a>;
5162
5163 fn id(&self) -> usize {
5164 self.snapshot.id().to_usize()
5165 }
5166
5167 fn len(&self) -> usize {
5168 if self.include_ignored {
5169 self.snapshot.file_count()
5170 } else {
5171 self.snapshot.visible_file_count()
5172 }
5173 }
5174
5175 fn prefix(&self) -> Arc<str> {
5176 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5177 self.snapshot.root_name().into()
5178 } else if self.include_root_name {
5179 format!("{}/", self.snapshot.root_name()).into()
5180 } else {
5181 "".into()
5182 }
5183 }
5184
5185 fn candidates(&'a self, start: usize) -> Self::Candidates {
5186 CandidateSetIter {
5187 traversal: self.snapshot.files(self.include_ignored, start),
5188 }
5189 }
5190}
5191
5192struct CandidateSetIter<'a> {
5193 traversal: Traversal<'a>,
5194}
5195
5196impl<'a> Iterator for CandidateSetIter<'a> {
5197 type Item = PathMatchCandidate<'a>;
5198
5199 fn next(&mut self) -> Option<Self::Item> {
5200 self.traversal.next().map(|entry| {
5201 if let EntryKind::File(char_bag) = entry.kind {
5202 PathMatchCandidate {
5203 path: &entry.path,
5204 char_bag,
5205 }
5206 } else {
5207 unreachable!()
5208 }
5209 })
5210 }
5211}
5212
5213impl Entity for Project {
5214 type Event = Event;
5215
5216 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5217 match &self.client_state {
5218 ProjectClientState::Local { remote_id_rx, .. } => {
5219 if let Some(project_id) = *remote_id_rx.borrow() {
5220 self.client
5221 .send(proto::UnregisterProject { project_id })
5222 .log_err();
5223 }
5224 }
5225 ProjectClientState::Remote { remote_id, .. } => {
5226 self.client
5227 .send(proto::LeaveProject {
5228 project_id: *remote_id,
5229 })
5230 .log_err();
5231 }
5232 }
5233 }
5234
5235 fn app_will_quit(
5236 &mut self,
5237 _: &mut MutableAppContext,
5238 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5239 let shutdown_futures = self
5240 .language_servers
5241 .drain()
5242 .filter_map(|(_, (_, server))| server.shutdown())
5243 .collect::<Vec<_>>();
5244 Some(
5245 async move {
5246 futures::future::join_all(shutdown_futures).await;
5247 }
5248 .boxed(),
5249 )
5250 }
5251}
5252
5253impl Collaborator {
5254 fn from_proto(
5255 message: proto::Collaborator,
5256 user_store: &ModelHandle<UserStore>,
5257 cx: &mut AsyncAppContext,
5258 ) -> impl Future<Output = Result<Self>> {
5259 let user = user_store.update(cx, |user_store, cx| {
5260 user_store.fetch_user(message.user_id, cx)
5261 });
5262
5263 async move {
5264 Ok(Self {
5265 peer_id: PeerId(message.peer_id),
5266 user: user.await?,
5267 replica_id: message.replica_id as ReplicaId,
5268 })
5269 }
5270 }
5271}
5272
5273impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5274 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5275 Self {
5276 worktree_id,
5277 path: path.as_ref().into(),
5278 }
5279 }
5280}
5281
5282impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5283 fn from(options: lsp::CreateFileOptions) -> Self {
5284 Self {
5285 overwrite: options.overwrite.unwrap_or(false),
5286 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5287 }
5288 }
5289}
5290
5291impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5292 fn from(options: lsp::RenameFileOptions) -> Self {
5293 Self {
5294 overwrite: options.overwrite.unwrap_or(false),
5295 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5296 }
5297 }
5298}
5299
5300impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5301 fn from(options: lsp::DeleteFileOptions) -> Self {
5302 Self {
5303 recursive: options.recursive.unwrap_or(false),
5304 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5305 }
5306 }
5307}
5308
5309fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5310 proto::Symbol {
5311 source_worktree_id: symbol.source_worktree_id.to_proto(),
5312 worktree_id: symbol.worktree_id.to_proto(),
5313 language_server_name: symbol.language_server_name.0.to_string(),
5314 name: symbol.name.clone(),
5315 kind: unsafe { mem::transmute(symbol.kind) },
5316 path: symbol.path.to_string_lossy().to_string(),
5317 start: Some(proto::Point {
5318 row: symbol.range.start.row,
5319 column: symbol.range.start.column,
5320 }),
5321 end: Some(proto::Point {
5322 row: symbol.range.end.row,
5323 column: symbol.range.end.column,
5324 }),
5325 signature: symbol.signature.to_vec(),
5326 }
5327}
5328
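/// Computes `path` relative to `base` by walking both component lists and
/// emitting a `..` component for each unmatched component remaining in `base`.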
5329fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5330 let mut path_components = path.components();
5331 let mut base_components = base.components();
5332 let mut components: Vec<Component> = Vec::new();
5333 loop {
5334 match (path_components.next(), base_components.next()) {
5335 (None, None) => break,
5336 (Some(a), None) => {
5337 components.push(a);
5338 components.extend(path_components.by_ref());
5339 break;
5340 }
5341 (None, _) => components.push(Component::ParentDir),
5342 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5343 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5344 (Some(a), Some(_)) => {
5345 components.push(Component::ParentDir);
5346 for _ in base_components {
5347 components.push(Component::ParentDir);
5348 }
5349 components.push(a);
5350 components.extend(path_components.by_ref());
5351 break;
5352 }
5353 }
5354 }
5355 components.iter().map(|c| c.as_os_str()).collect()
5356}
5357
5358impl Item for Buffer {
5359 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5360 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5361 }
5362}
5363
5364#[cfg(test)]
5365mod tests {
5366 use crate::worktree::WorktreeHandle;
5367
5368 use super::{Event, *};
5369 use fs::RealFs;
5370 use futures::{future, StreamExt};
5371 use gpui::test::subscribe;
5372 use language::{
5373 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5374 OffsetRangeExt, Point, ToPoint,
5375 };
5376 use lsp::Url;
5377 use serde_json::json;
5378 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5379 use unindent::Unindent as _;
5380 use util::{assert_set_eq, test::temp_tree};
5381
5382 #[gpui::test]
5383 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5384 let dir = temp_tree(json!({
5385 "root": {
5386 "apple": "",
5387 "banana": {
5388 "carrot": {
5389 "date": "",
5390 "endive": "",
5391 }
5392 },
5393 "fennel": {
5394 "grape": "",
5395 }
5396 }
5397 }));
5398
5399 let root_link_path = dir.path().join("root_link");
5400 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5401 unix::fs::symlink(
5402 &dir.path().join("root/fennel"),
5403 &dir.path().join("root/finnochio"),
5404 )
5405 .unwrap();
5406
5407 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5408
5409 project.read_with(cx, |project, cx| {
5410 let tree = project.worktrees(cx).next().unwrap().read(cx);
5411 assert_eq!(tree.file_count(), 5);
5412 assert_eq!(
5413 tree.inode_for_path("fennel/grape"),
5414 tree.inode_for_path("finnochio/grape")
5415 );
5416 });
5417
5418 let cancel_flag = Default::default();
5419 let results = project
5420 .read_with(cx, |project, cx| {
5421 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5422 })
5423 .await;
5424 assert_eq!(
5425 results
5426 .into_iter()
5427 .map(|result| result.path)
5428 .collect::<Vec<Arc<Path>>>(),
5429 vec![
5430 PathBuf::from("banana/carrot/date").into(),
5431 PathBuf::from("banana/carrot/endive").into(),
5432 ]
5433 );
5434 }
5435
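    // A minimal illustrative check of `relativize_path` above. It needs no app
    // context, so a plain `#[test]` suffices; the paths are arbitrary examples.
    #[test]
    fn test_relativize_path() {
        assert_eq!(
            relativize_path(Path::new("/a/b"), Path::new("/a/b/c/d.rs")),
            PathBuf::from("c/d.rs")
        );
        assert_eq!(
            relativize_path(Path::new("/a/b/c"), Path::new("/a/d.rs")),
            PathBuf::from("../../d.rs")
        );
    }
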
5436 #[gpui::test]
5437 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5438 cx.foreground().forbid_parking();
5439
5440 let mut rust_language = Language::new(
5441 LanguageConfig {
5442 name: "Rust".into(),
5443 path_suffixes: vec!["rs".to_string()],
5444 ..Default::default()
5445 },
5446 Some(tree_sitter_rust::language()),
5447 );
5448 let mut json_language = Language::new(
5449 LanguageConfig {
5450 name: "JSON".into(),
5451 path_suffixes: vec!["json".to_string()],
5452 ..Default::default()
5453 },
5454 None,
5455 );
5456 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5457 name: "the-rust-language-server",
5458 capabilities: lsp::ServerCapabilities {
5459 completion_provider: Some(lsp::CompletionOptions {
5460 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5461 ..Default::default()
5462 }),
5463 ..Default::default()
5464 },
5465 ..Default::default()
5466 });
5467 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5468 name: "the-json-language-server",
5469 capabilities: lsp::ServerCapabilities {
5470 completion_provider: Some(lsp::CompletionOptions {
5471 trigger_characters: Some(vec![":".to_string()]),
5472 ..Default::default()
5473 }),
5474 ..Default::default()
5475 },
5476 ..Default::default()
5477 });
5478
5479 let fs = FakeFs::new(cx.background());
5480 fs.insert_tree(
5481 "/the-root",
5482 json!({
5483 "test.rs": "const A: i32 = 1;",
5484 "test2.rs": "",
5485 "Cargo.toml": "a = 1",
5486 "package.json": "{\"a\": 1}",
5487 }),
5488 )
5489 .await;
5490
5491 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5492 project.update(cx, |project, _| {
5493 project.languages.add(Arc::new(rust_language));
5494 project.languages.add(Arc::new(json_language));
5495 });
5496
5497 // Open a buffer without an associated language server.
5498 let toml_buffer = project
5499 .update(cx, |project, cx| {
5500 project.open_local_buffer("/the-root/Cargo.toml", cx)
5501 })
5502 .await
5503 .unwrap();
5504
5505 // Open a buffer with an associated language server.
5506 let rust_buffer = project
5507 .update(cx, |project, cx| {
5508 project.open_local_buffer("/the-root/test.rs", cx)
5509 })
5510 .await
5511 .unwrap();
5512
5513        // A Rust language server is started up, and it is notified about Rust files.
5514 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5515 assert_eq!(
5516 fake_rust_server
5517 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5518 .await
5519 .text_document,
5520 lsp::TextDocumentItem {
5521 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5522 version: 0,
5523 text: "const A: i32 = 1;".to_string(),
5524 language_id: Default::default()
5525 }
5526 );
5527
5528 // The buffer is configured based on the language server's capabilities.
5529 rust_buffer.read_with(cx, |buffer, _| {
5530 assert_eq!(
5531 buffer.completion_triggers(),
5532 &[".".to_string(), "::".to_string()]
5533 );
5534 });
5535 toml_buffer.read_with(cx, |buffer, _| {
5536 assert!(buffer.completion_triggers().is_empty());
5537 });
5538
5539 // Edit a buffer. The changes are reported to the language server.
5540 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5541 assert_eq!(
5542 fake_rust_server
5543 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5544 .await
5545 .text_document,
5546 lsp::VersionedTextDocumentIdentifier::new(
5547 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5548 1
5549 )
5550 );
5551
5552 // Open a third buffer with a different associated language server.
5553 let json_buffer = project
5554 .update(cx, |project, cx| {
5555 project.open_local_buffer("/the-root/package.json", cx)
5556 })
5557 .await
5558 .unwrap();
5559
5560        // A JSON language server is started up, and it is notified only about the JSON buffer.
5561 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5562 assert_eq!(
5563 fake_json_server
5564 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5565 .await
5566 .text_document,
5567 lsp::TextDocumentItem {
5568 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5569 version: 0,
5570 text: "{\"a\": 1}".to_string(),
5571 language_id: Default::default()
5572 }
5573 );
5574
5575 // This buffer is configured based on the second language server's
5576 // capabilities.
5577 json_buffer.read_with(cx, |buffer, _| {
5578 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5579 });
5580
5581 // When opening another buffer whose language server is already running,
5582 // it is also configured based on the existing language server's capabilities.
5583 let rust_buffer2 = project
5584 .update(cx, |project, cx| {
5585 project.open_local_buffer("/the-root/test2.rs", cx)
5586 })
5587 .await
5588 .unwrap();
5589 rust_buffer2.read_with(cx, |buffer, _| {
5590 assert_eq!(
5591 buffer.completion_triggers(),
5592 &[".".to_string(), "::".to_string()]
5593 );
5594 });
5595
5596 // Changes are reported only to servers matching the buffer's language.
5597 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5598 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5599 assert_eq!(
5600 fake_rust_server
5601 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5602 .await
5603 .text_document,
5604 lsp::VersionedTextDocumentIdentifier::new(
5605 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5606 1
5607 )
5608 );
5609
5610 // Save notifications are reported to all servers.
5611 toml_buffer
5612 .update(cx, |buffer, cx| buffer.save(cx))
5613 .await
5614 .unwrap();
5615 assert_eq!(
5616 fake_rust_server
5617 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5618 .await
5619 .text_document,
5620 lsp::TextDocumentIdentifier::new(
5621 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5622 )
5623 );
5624 assert_eq!(
5625 fake_json_server
5626 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5627 .await
5628 .text_document,
5629 lsp::TextDocumentIdentifier::new(
5630 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5631 )
5632 );
5633
5634 // Renames are reported only to servers matching the buffer's language.
5635 fs.rename(
5636 Path::new("/the-root/test2.rs"),
5637 Path::new("/the-root/test3.rs"),
5638 Default::default(),
5639 )
5640 .await
5641 .unwrap();
5642 assert_eq!(
5643 fake_rust_server
5644 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5645 .await
5646 .text_document,
5647 lsp::TextDocumentIdentifier::new(
5648 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5649 ),
5650 );
5651 assert_eq!(
5652 fake_rust_server
5653 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5654 .await
5655 .text_document,
5656 lsp::TextDocumentItem {
5657 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5658 version: 0,
5659 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5660 language_id: Default::default()
5661 },
5662 );
5663
5664 rust_buffer2.update(cx, |buffer, cx| {
5665 buffer.update_diagnostics(
5666 DiagnosticSet::from_sorted_entries(
5667 vec![DiagnosticEntry {
5668 diagnostic: Default::default(),
5669 range: Anchor::MIN..Anchor::MAX,
5670 }],
5671 &buffer.snapshot(),
5672 ),
5673 cx,
5674 );
5675 assert_eq!(
5676 buffer
5677 .snapshot()
5678 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5679 .count(),
5680 1
5681 );
5682 });
5683
5684 // When the rename changes the extension of the file, the buffer gets closed on the old
5685 // language server and gets opened on the new one.
5686 fs.rename(
5687 Path::new("/the-root/test3.rs"),
5688 Path::new("/the-root/test3.json"),
5689 Default::default(),
5690 )
5691 .await
5692 .unwrap();
5693 assert_eq!(
5694 fake_rust_server
5695 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5696 .await
5697 .text_document,
5698 lsp::TextDocumentIdentifier::new(
5699 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5700 ),
5701 );
5702 assert_eq!(
5703 fake_json_server
5704 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5705 .await
5706 .text_document,
5707 lsp::TextDocumentItem {
5708 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5709 version: 0,
5710 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5711 language_id: Default::default()
5712 },
5713 );
5714
5715 // We clear the diagnostics, since the language has changed.
5716 rust_buffer2.read_with(cx, |buffer, _| {
5717 assert_eq!(
5718 buffer
5719 .snapshot()
5720 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5721 .count(),
5722 0
5723 );
5724 });
5725
5726        // The renamed file's version resets after switching language servers.
5727 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5728 assert_eq!(
5729 fake_json_server
5730 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5731 .await
5732 .text_document,
5733 lsp::VersionedTextDocumentIdentifier::new(
5734 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5735 1
5736 )
5737 );
5738
5739 // Restart language servers
5740 project.update(cx, |project, cx| {
5741 project.restart_language_servers_for_buffers(
5742 vec![rust_buffer.clone(), json_buffer.clone()],
5743 cx,
5744 );
5745 });
5746
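        // Both fake servers should receive a shutdown request as part of the restart.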
5747 let mut rust_shutdown_requests = fake_rust_server
5748 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5749 let mut json_shutdown_requests = fake_json_server
5750 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5751 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5752
5753 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5754 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5755
5756        // Ensure the Rust document is reopened in the new Rust language server.
5757 assert_eq!(
5758 fake_rust_server
5759 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5760 .await
5761 .text_document,
5762 lsp::TextDocumentItem {
5763 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5764 version: 1,
5765 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5766 language_id: Default::default()
5767 }
5768 );
5769
5770        // Ensure the JSON documents are reopened in the new JSON language server.
5771 assert_set_eq!(
5772 [
5773 fake_json_server
5774 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5775 .await
5776 .text_document,
5777 fake_json_server
5778 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5779 .await
5780 .text_document,
5781 ],
5782 [
5783 lsp::TextDocumentItem {
5784 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5785 version: 0,
5786 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5787 language_id: Default::default()
5788 },
5789 lsp::TextDocumentItem {
5790 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5791 version: 1,
5792 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5793 language_id: Default::default()
5794 }
5795 ]
5796 );
5797
5798 // Close notifications are reported only to servers matching the buffer's language.
5799 cx.update(|_| drop(json_buffer));
5800 let close_message = lsp::DidCloseTextDocumentParams {
5801 text_document: lsp::TextDocumentIdentifier::new(
5802 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5803 ),
5804 };
5805 assert_eq!(
5806 fake_json_server
5807 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5808 .await,
5809 close_message,
5810 );
5811 }
5812
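    // Each single-file worktree should receive only the diagnostics published for its own file.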
5813 #[gpui::test]
5814 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5815 cx.foreground().forbid_parking();
5816
5817 let fs = FakeFs::new(cx.background());
5818 fs.insert_tree(
5819 "/dir",
5820 json!({
5821 "a.rs": "let a = 1;",
5822 "b.rs": "let b = 2;"
5823 }),
5824 )
5825 .await;
5826
5827 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5828
5829 let buffer_a = project
5830 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5831 .await
5832 .unwrap();
5833 let buffer_b = project
5834 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5835 .await
5836 .unwrap();
5837
5838 project.update(cx, |project, cx| {
5839 project
5840 .update_diagnostics(
5841 lsp::PublishDiagnosticsParams {
5842 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5843 version: None,
5844 diagnostics: vec![lsp::Diagnostic {
5845 range: lsp::Range::new(
5846 lsp::Position::new(0, 4),
5847 lsp::Position::new(0, 5),
5848 ),
5849 severity: Some(lsp::DiagnosticSeverity::ERROR),
5850 message: "error 1".to_string(),
5851 ..Default::default()
5852 }],
5853 },
5854 &[],
5855 cx,
5856 )
5857 .unwrap();
5858 project
5859 .update_diagnostics(
5860 lsp::PublishDiagnosticsParams {
5861 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5862 version: None,
5863 diagnostics: vec![lsp::Diagnostic {
5864 range: lsp::Range::new(
5865 lsp::Position::new(0, 4),
5866 lsp::Position::new(0, 5),
5867 ),
5868 severity: Some(lsp::DiagnosticSeverity::WARNING),
5869 message: "error 2".to_string(),
5870 ..Default::default()
5871 }],
5872 },
5873 &[],
5874 cx,
5875 )
5876 .unwrap();
5877 });
5878
5879 buffer_a.read_with(cx, |buffer, _| {
5880 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5881 assert_eq!(
5882 chunks
5883 .iter()
5884 .map(|(s, d)| (s.as_str(), *d))
5885 .collect::<Vec<_>>(),
5886 &[
5887 ("let ", None),
5888 ("a", Some(DiagnosticSeverity::ERROR)),
5889 (" = 1;", None),
5890 ]
5891 );
5892 });
5893 buffer_b.read_with(cx, |buffer, _| {
5894 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5895 assert_eq!(
5896 chunks
5897 .iter()
5898 .map(|(s, d)| (s.as_str(), *d))
5899 .collect::<Vec<_>>(),
5900 &[
5901 ("let ", None),
5902 ("b", Some(DiagnosticSeverity::WARNING)),
5903 (" = 2;", None),
5904 ]
5905 );
5906 });
5907 }
5908
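    // Progress notifications using the disk-based diagnostics token are surfaced as project events.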
5909 #[gpui::test]
5910 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5911 cx.foreground().forbid_parking();
5912
5913 let progress_token = "the-progress-token";
5914 let mut language = Language::new(
5915 LanguageConfig {
5916 name: "Rust".into(),
5917 path_suffixes: vec!["rs".to_string()],
5918 ..Default::default()
5919 },
5920 Some(tree_sitter_rust::language()),
5921 );
5922 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5923 disk_based_diagnostics_progress_token: Some(progress_token),
5924 disk_based_diagnostics_sources: &["disk"],
5925 ..Default::default()
5926 });
5927
5928 let fs = FakeFs::new(cx.background());
5929 fs.insert_tree(
5930 "/dir",
5931 json!({
5932 "a.rs": "fn a() { A }",
5933 "b.rs": "const y: i32 = 1",
5934 }),
5935 )
5936 .await;
5937
5938 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5939 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5940 let worktree_id =
5941 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5942
5943        // Cause the worktree to start the fake language server.
5944 let _buffer = project
5945 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5946 .await
5947 .unwrap();
5948
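        // Subscribe to project events so the disk-based diagnostics lifecycle can be observed.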
5949 let mut events = subscribe(&project, cx);
5950
5951 let mut fake_server = fake_servers.next().await.unwrap();
5952 fake_server.start_progress(progress_token).await;
5953 assert_eq!(
5954 events.next().await.unwrap(),
5955 Event::DiskBasedDiagnosticsStarted
5956 );
5957
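        // Start additional progress reports with the same token; diagnostics aren't
        // considered finished until every started report has ended.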
5958 fake_server.start_progress(progress_token).await;
5959 fake_server.end_progress(progress_token).await;
5960 fake_server.start_progress(progress_token).await;
5961
5962 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5963 lsp::PublishDiagnosticsParams {
5964 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5965 version: None,
5966 diagnostics: vec![lsp::Diagnostic {
5967 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5968 severity: Some(lsp::DiagnosticSeverity::ERROR),
5969 message: "undefined variable 'A'".to_string(),
5970 ..Default::default()
5971 }],
5972 },
5973 );
5974 assert_eq!(
5975 events.next().await.unwrap(),
5976 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5977 );
5978
5979 fake_server.end_progress(progress_token).await;
5980 fake_server.end_progress(progress_token).await;
5981 assert_eq!(
5982 events.next().await.unwrap(),
5983 Event::DiskBasedDiagnosticsUpdated
5984 );
5985 assert_eq!(
5986 events.next().await.unwrap(),
5987 Event::DiskBasedDiagnosticsFinished
5988 );
5989
5990 let buffer = project
5991 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5992 .await
5993 .unwrap();
5994
5995 buffer.read_with(cx, |buffer, _| {
5996 let snapshot = buffer.snapshot();
5997 let diagnostics = snapshot
5998 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5999 .collect::<Vec<_>>();
6000 assert_eq!(
6001 diagnostics,
6002 &[DiagnosticEntry {
6003 range: Point::new(0, 9)..Point::new(0, 10),
6004 diagnostic: Diagnostic {
6005 severity: lsp::DiagnosticSeverity::ERROR,
6006 message: "undefined variable 'A'".to_string(),
6007 group_id: 0,
6008 is_primary: true,
6009 ..Default::default()
6010 }
6011 }]
6012 )
6013 });
6014
6015 // Ensure publishing empty diagnostics twice only results in one update event.
6016 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6017 lsp::PublishDiagnosticsParams {
6018 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6019 version: None,
6020 diagnostics: Default::default(),
6021 },
6022 );
6023 assert_eq!(
6024 events.next().await.unwrap(),
6025 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6026 );
6027
6028 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6029 lsp::PublishDiagnosticsParams {
6030 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6031 version: None,
6032 diagnostics: Default::default(),
6033 },
6034 );
6035 cx.foreground().run_until_parked();
6036 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6037 }
6038
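    // Restarting a language server while its diagnostics are still running should not leave
    // the project stuck in a running-diagnostics state.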
6039 #[gpui::test]
6040 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6041 cx.foreground().forbid_parking();
6042
6043 let progress_token = "the-progress-token";
6044 let mut language = Language::new(
6045 LanguageConfig {
6046 path_suffixes: vec!["rs".to_string()],
6047 ..Default::default()
6048 },
6049 None,
6050 );
6051 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6052 disk_based_diagnostics_sources: &["disk"],
6053 disk_based_diagnostics_progress_token: Some(progress_token),
6054 ..Default::default()
6055 });
6056
6057 let fs = FakeFs::new(cx.background());
6058 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6059
6060 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6061 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6062
6063 let buffer = project
6064 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6065 .await
6066 .unwrap();
6067
6068 // Simulate diagnostics starting to update.
6069 let mut fake_server = fake_servers.next().await.unwrap();
6070 fake_server.start_progress(progress_token).await;
6071
6072 // Restart the server before the diagnostics finish updating.
6073 project.update(cx, |project, cx| {
6074 project.restart_language_servers_for_buffers([buffer], cx);
6075 });
6076 let mut events = subscribe(&project, cx);
6077
6078 // Simulate the newly started server sending more diagnostics.
6079 let mut fake_server = fake_servers.next().await.unwrap();
6080 fake_server.start_progress(progress_token).await;
6081 assert_eq!(
6082 events.next().await.unwrap(),
6083 Event::DiskBasedDiagnosticsStarted
6084 );
6085
6086 // All diagnostics are considered done, despite the old server's diagnostic
6087 // task never completing.
6088 fake_server.end_progress(progress_token).await;
6089 assert_eq!(
6090 events.next().await.unwrap(),
6091 Event::DiskBasedDiagnosticsUpdated
6092 );
6093 assert_eq!(
6094 events.next().await.unwrap(),
6095 Event::DiskBasedDiagnosticsFinished
6096 );
6097 project.read_with(cx, |project, _| {
6098 assert!(!project.is_running_disk_based_diagnostics());
6099 });
6100 }
6101
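    // Diagnostics published for an older buffer version are translated through the edits
    // made since that version.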
6102 #[gpui::test]
6103 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6104 cx.foreground().forbid_parking();
6105
6106 let mut language = Language::new(
6107 LanguageConfig {
6108 name: "Rust".into(),
6109 path_suffixes: vec!["rs".to_string()],
6110 ..Default::default()
6111 },
6112 Some(tree_sitter_rust::language()),
6113 );
6114 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6115 disk_based_diagnostics_sources: &["disk"],
6116 ..Default::default()
6117 });
6118
6119 let text = "
6120 fn a() { A }
6121 fn b() { BB }
6122 fn c() { CCC }
6123 "
6124 .unindent();
6125
6126 let fs = FakeFs::new(cx.background());
6127 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6128
6129 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6130 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6131
6132 let buffer = project
6133 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6134 .await
6135 .unwrap();
6136
6137 let mut fake_server = fake_servers.next().await.unwrap();
6138 let open_notification = fake_server
6139 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6140 .await;
6141
6142 // Edit the buffer, moving the content down
6143 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6144 let change_notification_1 = fake_server
6145 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6146 .await;
6147 assert!(
6148 change_notification_1.text_document.version > open_notification.text_document.version
6149 );
6150
6151 // Report some diagnostics for the initial version of the buffer
6152 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6153 lsp::PublishDiagnosticsParams {
6154 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6155 version: Some(open_notification.text_document.version),
6156 diagnostics: vec![
6157 lsp::Diagnostic {
6158 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6159 severity: Some(DiagnosticSeverity::ERROR),
6160 message: "undefined variable 'A'".to_string(),
6161 source: Some("disk".to_string()),
6162 ..Default::default()
6163 },
6164 lsp::Diagnostic {
6165 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6166 severity: Some(DiagnosticSeverity::ERROR),
6167 message: "undefined variable 'BB'".to_string(),
6168 source: Some("disk".to_string()),
6169 ..Default::default()
6170 },
6171 lsp::Diagnostic {
6172 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6173 severity: Some(DiagnosticSeverity::ERROR),
6174 source: Some("disk".to_string()),
6175 message: "undefined variable 'CCC'".to_string(),
6176 ..Default::default()
6177 },
6178 ],
6179 },
6180 );
6181
6182 // The diagnostics have moved down since they were created.
6183 buffer.next_notification(cx).await;
6184 buffer.read_with(cx, |buffer, _| {
6185 assert_eq!(
6186 buffer
6187 .snapshot()
6188 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6189 .collect::<Vec<_>>(),
6190 &[
6191 DiagnosticEntry {
6192 range: Point::new(3, 9)..Point::new(3, 11),
6193 diagnostic: Diagnostic {
6194 severity: DiagnosticSeverity::ERROR,
6195 message: "undefined variable 'BB'".to_string(),
6196 is_disk_based: true,
6197 group_id: 1,
6198 is_primary: true,
6199 ..Default::default()
6200 },
6201 },
6202 DiagnosticEntry {
6203 range: Point::new(4, 9)..Point::new(4, 12),
6204 diagnostic: Diagnostic {
6205 severity: DiagnosticSeverity::ERROR,
6206 message: "undefined variable 'CCC'".to_string(),
6207 is_disk_based: true,
6208 group_id: 2,
6209 is_primary: true,
6210 ..Default::default()
6211 }
6212 }
6213 ]
6214 );
6215 assert_eq!(
6216 chunks_with_diagnostics(buffer, 0..buffer.len()),
6217 [
6218 ("\n\nfn a() { ".to_string(), None),
6219 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6220 (" }\nfn b() { ".to_string(), None),
6221 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6222 (" }\nfn c() { ".to_string(), None),
6223 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6224 (" }\n".to_string(), None),
6225 ]
6226 );
6227 assert_eq!(
6228 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6229 [
6230 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6231 (" }\nfn c() { ".to_string(), None),
6232 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6233 ]
6234 );
6235 });
6236
6237 // Ensure overlapping diagnostics are highlighted correctly.
6238 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6239 lsp::PublishDiagnosticsParams {
6240 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6241 version: Some(open_notification.text_document.version),
6242 diagnostics: vec![
6243 lsp::Diagnostic {
6244 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6245 severity: Some(DiagnosticSeverity::ERROR),
6246 message: "undefined variable 'A'".to_string(),
6247 source: Some("disk".to_string()),
6248 ..Default::default()
6249 },
6250 lsp::Diagnostic {
6251 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6252 severity: Some(DiagnosticSeverity::WARNING),
6253 message: "unreachable statement".to_string(),
6254 source: Some("disk".to_string()),
6255 ..Default::default()
6256 },
6257 ],
6258 },
6259 );
6260
6261 buffer.next_notification(cx).await;
6262 buffer.read_with(cx, |buffer, _| {
6263 assert_eq!(
6264 buffer
6265 .snapshot()
6266 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6267 .collect::<Vec<_>>(),
6268 &[
6269 DiagnosticEntry {
6270 range: Point::new(2, 9)..Point::new(2, 12),
6271 diagnostic: Diagnostic {
6272 severity: DiagnosticSeverity::WARNING,
6273 message: "unreachable statement".to_string(),
6274 is_disk_based: true,
6275 group_id: 4,
6276 is_primary: true,
6277 ..Default::default()
6278 }
6279 },
6280 DiagnosticEntry {
6281 range: Point::new(2, 9)..Point::new(2, 10),
6282 diagnostic: Diagnostic {
6283 severity: DiagnosticSeverity::ERROR,
6284 message: "undefined variable 'A'".to_string(),
6285 is_disk_based: true,
6286 group_id: 3,
6287 is_primary: true,
6288 ..Default::default()
6289 },
6290 }
6291 ]
6292 );
6293 assert_eq!(
6294 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6295 [
6296 ("fn a() { ".to_string(), None),
6297 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6298 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6299 ("\n".to_string(), None),
6300 ]
6301 );
6302 assert_eq!(
6303 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6304 [
6305 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6306 ("\n".to_string(), None),
6307 ]
6308 );
6309 });
6310
6311 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6312 // changes since the last save.
6313 buffer.update(cx, |buffer, cx| {
6314 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6315 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6316 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6317 });
6318 let change_notification_2 = fake_server
6319 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6320 .await;
6321 assert!(
6322 change_notification_2.text_document.version
6323 > change_notification_1.text_document.version
6324 );
6325
6326 // Handle out-of-order diagnostics
6327 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6328 lsp::PublishDiagnosticsParams {
6329 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6330 version: Some(change_notification_2.text_document.version),
6331 diagnostics: vec![
6332 lsp::Diagnostic {
6333 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6334 severity: Some(DiagnosticSeverity::ERROR),
6335 message: "undefined variable 'BB'".to_string(),
6336 source: Some("disk".to_string()),
6337 ..Default::default()
6338 },
6339 lsp::Diagnostic {
6340 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6341 severity: Some(DiagnosticSeverity::WARNING),
6342 message: "undefined variable 'A'".to_string(),
6343 source: Some("disk".to_string()),
6344 ..Default::default()
6345 },
6346 ],
6347 },
6348 );
6349
6350 buffer.next_notification(cx).await;
6351 buffer.read_with(cx, |buffer, _| {
6352 assert_eq!(
6353 buffer
6354 .snapshot()
6355 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6356 .collect::<Vec<_>>(),
6357 &[
6358 DiagnosticEntry {
6359 range: Point::new(2, 21)..Point::new(2, 22),
6360 diagnostic: Diagnostic {
6361 severity: DiagnosticSeverity::WARNING,
6362 message: "undefined variable 'A'".to_string(),
6363 is_disk_based: true,
6364 group_id: 6,
6365 is_primary: true,
6366 ..Default::default()
6367 }
6368 },
6369 DiagnosticEntry {
6370 range: Point::new(3, 9)..Point::new(3, 14),
6371 diagnostic: Diagnostic {
6372 severity: DiagnosticSeverity::ERROR,
6373 message: "undefined variable 'BB'".to_string(),
6374 is_disk_based: true,
6375 group_id: 5,
6376 is_primary: true,
6377 ..Default::default()
6378 },
6379 }
6380 ]
6381 );
6382 });
6383 }
6384
6385 #[gpui::test]
6386 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6387 cx.foreground().forbid_parking();
6388
6389 let text = concat!(
6390 "let one = ;\n", //
6391 "let two = \n",
6392 "let three = 3;\n",
6393 );
6394
6395 let fs = FakeFs::new(cx.background());
6396 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6397
6398 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6399 let buffer = project
6400 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6401 .await
6402 .unwrap();
6403
6404 project.update(cx, |project, cx| {
6405 project
6406 .update_buffer_diagnostics(
6407 &buffer,
6408 vec![
6409 DiagnosticEntry {
6410 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6411 diagnostic: Diagnostic {
6412 severity: DiagnosticSeverity::ERROR,
6413 message: "syntax error 1".to_string(),
6414 ..Default::default()
6415 },
6416 },
6417 DiagnosticEntry {
6418 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6419 diagnostic: Diagnostic {
6420 severity: DiagnosticSeverity::ERROR,
6421 message: "syntax error 2".to_string(),
6422 ..Default::default()
6423 },
6424 },
6425 ],
6426 None,
6427 cx,
6428 )
6429 .unwrap();
6430 });
6431
6432 // An empty range is extended forward to include the following character.
6433 // At the end of a line, an empty range is extended backward to include
6434 // the preceding character.
6435 buffer.read_with(cx, |buffer, _| {
6436 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6437 assert_eq!(
6438 chunks
6439 .iter()
6440 .map(|(s, d)| (s.as_str(), *d))
6441 .collect::<Vec<_>>(),
6442 &[
6443 ("let one = ", None),
6444 (";", Some(DiagnosticSeverity::ERROR)),
6445 ("\nlet two =", None),
6446 (" ", Some(DiagnosticSeverity::ERROR)),
6447 ("\nlet three = 3;\n", None)
6448 ]
6449 );
6450 });
6451 }
6452
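    // Edits computed by the server against a past document version are adjusted so that
    // they apply cleanly to the current buffer contents.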
6453 #[gpui::test]
6454 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6455 cx.foreground().forbid_parking();
6456
6457 let mut language = Language::new(
6458 LanguageConfig {
6459 name: "Rust".into(),
6460 path_suffixes: vec!["rs".to_string()],
6461 ..Default::default()
6462 },
6463 Some(tree_sitter_rust::language()),
6464 );
6465 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6466
6467 let text = "
6468 fn a() {
6469 f1();
6470 }
6471 fn b() {
6472 f2();
6473 }
6474 fn c() {
6475 f3();
6476 }
6477 "
6478 .unindent();
6479
6480 let fs = FakeFs::new(cx.background());
6481 fs.insert_tree(
6482 "/dir",
6483 json!({
6484 "a.rs": text.clone(),
6485 }),
6486 )
6487 .await;
6488
6489 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6490 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6491 let buffer = project
6492 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6493 .await
6494 .unwrap();
6495
6496 let mut fake_server = fake_servers.next().await.unwrap();
6497 let lsp_document_version = fake_server
6498 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6499 .await
6500 .text_document
6501 .version;
6502
6503 // Simulate editing the buffer after the language server computes some edits.
6504 buffer.update(cx, |buffer, cx| {
6505 buffer.edit(
6506 [(
6507 Point::new(0, 0)..Point::new(0, 0),
6508 "// above first function\n",
6509 )],
6510 cx,
6511 );
6512 buffer.edit(
6513 [(
6514 Point::new(2, 0)..Point::new(2, 0),
6515 " // inside first function\n",
6516 )],
6517 cx,
6518 );
6519 buffer.edit(
6520 [(
6521 Point::new(6, 4)..Point::new(6, 4),
6522 "// inside second function ",
6523 )],
6524 cx,
6525 );
6526
6527 assert_eq!(
6528 buffer.text(),
6529 "
6530 // above first function
6531 fn a() {
6532 // inside first function
6533 f1();
6534 }
6535 fn b() {
6536 // inside second function f2();
6537 }
6538 fn c() {
6539 f3();
6540 }
6541 "
6542 .unindent()
6543 );
6544 });
6545
6546 let edits = project
6547 .update(cx, |project, cx| {
6548 project.edits_from_lsp(
6549 &buffer,
6550 vec![
6551 // replace body of first function
6552 lsp::TextEdit {
6553 range: lsp::Range::new(
6554 lsp::Position::new(0, 0),
6555 lsp::Position::new(3, 0),
6556 ),
6557 new_text: "
6558 fn a() {
6559 f10();
6560 }
6561 "
6562 .unindent(),
6563 },
6564 // edit inside second function
6565 lsp::TextEdit {
6566 range: lsp::Range::new(
6567 lsp::Position::new(4, 6),
6568 lsp::Position::new(4, 6),
6569 ),
6570 new_text: "00".into(),
6571 },
6572 // edit inside third function via two distinct edits
6573 lsp::TextEdit {
6574 range: lsp::Range::new(
6575 lsp::Position::new(7, 5),
6576 lsp::Position::new(7, 5),
6577 ),
6578 new_text: "4000".into(),
6579 },
6580 lsp::TextEdit {
6581 range: lsp::Range::new(
6582 lsp::Position::new(7, 5),
6583 lsp::Position::new(7, 6),
6584 ),
6585 new_text: "".into(),
6586 },
6587 ],
6588 Some(lsp_document_version),
6589 cx,
6590 )
6591 })
6592 .await
6593 .unwrap();
6594
6595 buffer.update(cx, |buffer, cx| {
6596 for (range, new_text) in edits {
6597 buffer.edit([(range, new_text)], cx);
6598 }
6599 assert_eq!(
6600 buffer.text(),
6601 "
6602 // above first function
6603 fn a() {
6604 // inside first function
6605 f10();
6606 }
6607 fn b() {
6608 // inside second function f200();
6609 }
6610 fn c() {
6611 f4000();
6612 }
6613 "
6614 .unindent()
6615 );
6616 });
6617 }
6618
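    // A small logical change expressed by the server as a very large diff is condensed back
    // into a minimal set of edits before being applied.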
6619 #[gpui::test]
6620 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6621 cx.foreground().forbid_parking();
6622
6623 let text = "
6624 use a::b;
6625 use a::c;
6626
6627 fn f() {
6628 b();
6629 c();
6630 }
6631 "
6632 .unindent();
6633
6634 let fs = FakeFs::new(cx.background());
6635 fs.insert_tree(
6636 "/dir",
6637 json!({
6638 "a.rs": text.clone(),
6639 }),
6640 )
6641 .await;
6642
6643 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6644 let buffer = project
6645 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6646 .await
6647 .unwrap();
6648
6649 // Simulate the language server sending us a small edit in the form of a very large diff.
6650 // Rust-analyzer does this when performing a merge-imports code action.
6651 let edits = project
6652 .update(cx, |project, cx| {
6653 project.edits_from_lsp(
6654 &buffer,
6655 [
6656 // Replace the first use statement without editing the semicolon.
6657 lsp::TextEdit {
6658 range: lsp::Range::new(
6659 lsp::Position::new(0, 4),
6660 lsp::Position::new(0, 8),
6661 ),
6662 new_text: "a::{b, c}".into(),
6663 },
6664 // Reinsert the remainder of the file between the semicolon and the final
6665 // newline of the file.
6666 lsp::TextEdit {
6667 range: lsp::Range::new(
6668 lsp::Position::new(0, 9),
6669 lsp::Position::new(0, 9),
6670 ),
6671 new_text: "\n\n".into(),
6672 },
6673 lsp::TextEdit {
6674 range: lsp::Range::new(
6675 lsp::Position::new(0, 9),
6676 lsp::Position::new(0, 9),
6677 ),
6678 new_text: "
6679 fn f() {
6680 b();
6681 c();
6682 }"
6683 .unindent(),
6684 },
6685 // Delete everything after the first newline of the file.
6686 lsp::TextEdit {
6687 range: lsp::Range::new(
6688 lsp::Position::new(1, 0),
6689 lsp::Position::new(7, 0),
6690 ),
6691 new_text: "".into(),
6692 },
6693 ],
6694 None,
6695 cx,
6696 )
6697 })
6698 .await
6699 .unwrap();
6700
6701 buffer.update(cx, |buffer, cx| {
6702 let edits = edits
6703 .into_iter()
6704 .map(|(range, text)| {
6705 (
6706 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6707 text,
6708 )
6709 })
6710 .collect::<Vec<_>>();
6711
6712 assert_eq!(
6713 edits,
6714 [
6715 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6716 (Point::new(1, 0)..Point::new(2, 0), "".into())
6717 ]
6718 );
6719
6720 for (range, new_text) in edits {
6721 buffer.edit([(range, new_text)], cx);
6722 }
6723 assert_eq!(
6724 buffer.text(),
6725 "
6726 use a::{b, c};
6727
6728 fn f() {
6729 b();
6730 c();
6731 }
6732 "
6733 .unindent()
6734 );
6735 });
6736 }
6737
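    // Collects the buffer's chunks into (text, severity) runs, merging adjacent chunks
    // that share the same diagnostic severity.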
6738 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6739 buffer: &Buffer,
6740 range: Range<T>,
6741 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6742 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6743 for chunk in buffer.snapshot().chunks(range, true) {
6744 if chunks.last().map_or(false, |prev_chunk| {
6745 prev_chunk.1 == chunk.diagnostic_severity
6746 }) {
6747 chunks.last_mut().unwrap().0.push_str(chunk.text);
6748 } else {
6749 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6750 }
6751 }
6752 chunks
6753 }
6754
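    // Fuzzy path matching in a worktree that contains only directories returns no results.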
6755 #[gpui::test]
6756 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6757 let dir = temp_tree(json!({
6758 "root": {
6759 "dir1": {},
6760 "dir2": {
6761 "dir3": {}
6762 }
6763 }
6764 }));
6765
6766 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6767 let cancel_flag = Default::default();
6768 let results = project
6769 .read_with(cx, |project, cx| {
6770 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6771 })
6772 .await;
6773
6774 assert!(results.is_empty());
6775 }
6776
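    // Going to a definition outside the project adds a hidden worktree for the target file,
    // which is released once the definition is dropped.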
6777 #[gpui::test(iterations = 10)]
6778 async fn test_definition(cx: &mut gpui::TestAppContext) {
6779 let mut language = Language::new(
6780 LanguageConfig {
6781 name: "Rust".into(),
6782 path_suffixes: vec!["rs".to_string()],
6783 ..Default::default()
6784 },
6785 Some(tree_sitter_rust::language()),
6786 );
6787 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6788
6789 let fs = FakeFs::new(cx.background());
6790 fs.insert_tree(
6791 "/dir",
6792 json!({
6793 "a.rs": "const fn a() { A }",
6794 "b.rs": "const y: i32 = crate::a()",
6795 }),
6796 )
6797 .await;
6798
6799 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6800 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6801
6802 let buffer = project
6803 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6804 .await
6805 .unwrap();
6806
6807 let fake_server = fake_servers.next().await.unwrap();
6808 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6809 let params = params.text_document_position_params;
6810 assert_eq!(
6811 params.text_document.uri.to_file_path().unwrap(),
6812 Path::new("/dir/b.rs"),
6813 );
6814 assert_eq!(params.position, lsp::Position::new(0, 22));
6815
6816 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6817 lsp::Location::new(
6818 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6819 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6820 ),
6821 )))
6822 });
6823
6824 let mut definitions = project
6825 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6826 .await
6827 .unwrap();
6828
6829 assert_eq!(definitions.len(), 1);
6830 let definition = definitions.pop().unwrap();
6831 cx.update(|cx| {
6832 let target_buffer = definition.buffer.read(cx);
6833 assert_eq!(
6834 target_buffer
6835 .file()
6836 .unwrap()
6837 .as_local()
6838 .unwrap()
6839 .abs_path(cx),
6840 Path::new("/dir/a.rs"),
6841 );
6842 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6843 assert_eq!(
6844 list_worktrees(&project, cx),
6845 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6846 );
6847
6848 drop(definition);
6849 });
6850 cx.read(|cx| {
6851 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6852 });
6853
6854 fn list_worktrees<'a>(
6855 project: &'a ModelHandle<Project>,
6856 cx: &'a AppContext,
6857 ) -> Vec<(&'a Path, bool)> {
6858 project
6859 .read(cx)
6860 .worktrees(cx)
6861 .map(|worktree| {
6862 let worktree = worktree.read(cx);
6863 (
6864 worktree.as_local().unwrap().abs_path().as_ref(),
6865 worktree.is_visible(),
6866 )
6867 })
6868 .collect::<Vec<_>>()
6869 }
6870 }
6871
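    // When a completion has no explicit edit range, the replaced range is inferred from the
    // word preceding the cursor.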
6872 #[gpui::test]
6873 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6874 let mut language = Language::new(
6875 LanguageConfig {
6876 name: "TypeScript".into(),
6877 path_suffixes: vec!["ts".to_string()],
6878 ..Default::default()
6879 },
6880 Some(tree_sitter_typescript::language_typescript()),
6881 );
6882 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6883
6884 let fs = FakeFs::new(cx.background());
6885 fs.insert_tree(
6886 "/dir",
6887 json!({
6888 "a.ts": "",
6889 }),
6890 )
6891 .await;
6892
6893 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6894 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6895 let buffer = project
6896 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6897 .await
6898 .unwrap();
6899
6900 let fake_server = fake_language_servers.next().await.unwrap();
6901
6902 let text = "let a = b.fqn";
6903 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6904 let completions = project.update(cx, |project, cx| {
6905 project.completions(&buffer, text.len(), cx)
6906 });
6907
6908 fake_server
6909 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6910 Ok(Some(lsp::CompletionResponse::Array(vec![
6911 lsp::CompletionItem {
6912 label: "fullyQualifiedName?".into(),
6913 insert_text: Some("fullyQualifiedName".into()),
6914 ..Default::default()
6915 },
6916 ])))
6917 })
6918 .next()
6919 .await;
6920 let completions = completions.await.unwrap();
6921 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6922 assert_eq!(completions.len(), 1);
6923 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6924 assert_eq!(
6925 completions[0].old_range.to_offset(&snapshot),
6926 text.len() - 3..text.len()
6927 );
6928 }
6929
6930 #[gpui::test(iterations = 10)]
6931 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6932 let mut language = Language::new(
6933 LanguageConfig {
6934 name: "TypeScript".into(),
6935 path_suffixes: vec!["ts".to_string()],
6936 ..Default::default()
6937 },
6938 None,
6939 );
6940 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6941
6942 let fs = FakeFs::new(cx.background());
6943 fs.insert_tree(
6944 "/dir",
6945 json!({
6946 "a.ts": "a",
6947 }),
6948 )
6949 .await;
6950
6951 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6952 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6953 let buffer = project
6954 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6955 .await
6956 .unwrap();
6957
6958 let fake_server = fake_language_servers.next().await.unwrap();
6959
6960        // The language server returns code actions that contain commands rather than edits.
6961 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6962 fake_server
6963 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6964 Ok(Some(vec![
6965 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6966 title: "The code action".into(),
6967 command: Some(lsp::Command {
6968 title: "The command".into(),
6969 command: "_the/command".into(),
6970 arguments: Some(vec![json!("the-argument")]),
6971 }),
6972 ..Default::default()
6973 }),
6974 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6975 title: "two".into(),
6976 ..Default::default()
6977 }),
6978 ]))
6979 })
6980 .next()
6981 .await;
6982
6983 let action = actions.await.unwrap()[0].clone();
6984 let apply = project.update(cx, |project, cx| {
6985 project.apply_code_action(buffer.clone(), action, true, cx)
6986 });
6987
6988        // Resolving the code action does not populate its edits. In the absence of
6989        // edits, we must execute the given command.
6990 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6991 |action, _| async move { Ok(action) },
6992 );
6993
6994 // While executing the command, the language server sends the editor
6995 // a `workspaceEdit` request.
6996 fake_server
6997 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6998 let fake = fake_server.clone();
6999 move |params, _| {
7000 assert_eq!(params.command, "_the/command");
7001 let fake = fake.clone();
7002 async move {
7003 fake.server
7004 .request::<lsp::request::ApplyWorkspaceEdit>(
7005 lsp::ApplyWorkspaceEditParams {
7006 label: None,
7007 edit: lsp::WorkspaceEdit {
7008 changes: Some(
7009 [(
7010 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7011 vec![lsp::TextEdit {
7012 range: lsp::Range::new(
7013 lsp::Position::new(0, 0),
7014 lsp::Position::new(0, 0),
7015 ),
7016 new_text: "X".into(),
7017 }],
7018 )]
7019 .into_iter()
7020 .collect(),
7021 ),
7022 ..Default::default()
7023 },
7024 },
7025 )
7026 .await
7027 .unwrap();
7028 Ok(Some(json!(null)))
7029 }
7030 }
7031 })
7032 .next()
7033 .await;
7034
7035 // Applying the code action returns a project transaction containing the edits
7036 // sent by the language server in its `workspaceEdit` request.
7037 let transaction = apply.await.unwrap();
7038 assert!(transaction.0.contains_key(&buffer));
7039 buffer.update(cx, |buffer, cx| {
7040 assert_eq!(buffer.text(), "Xa");
7041 buffer.undo(cx);
7042 assert_eq!(buffer.text(), "a");
7043 });
7044 }
7045
7046 #[gpui::test]
7047 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7048 let fs = FakeFs::new(cx.background());
7049 fs.insert_tree(
7050 "/dir",
7051 json!({
7052 "file1": "the old contents",
7053 }),
7054 )
7055 .await;
7056
7057 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7058 let buffer = project
7059 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7060 .await
7061 .unwrap();
7062 buffer
7063 .update(cx, |buffer, cx| {
7064 assert_eq!(buffer.text(), "the old contents");
7065 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7066 buffer.save(cx)
7067 })
7068 .await
7069 .unwrap();
7070
7071 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7072 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7073 }
7074
7075 #[gpui::test]
7076 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7077 let fs = FakeFs::new(cx.background());
7078 fs.insert_tree(
7079 "/dir",
7080 json!({
7081 "file1": "the old contents",
7082 }),
7083 )
7084 .await;
7085
7086 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7087 let buffer = project
7088 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7089 .await
7090 .unwrap();
7091 buffer
7092 .update(cx, |buffer, cx| {
7093 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7094 buffer.save(cx)
7095 })
7096 .await
7097 .unwrap();
7098
7099 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7100 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7101 }
7102
7103 #[gpui::test]
7104 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7105 let fs = FakeFs::new(cx.background());
7106 fs.insert_tree("/dir", json!({})).await;
7107
7108 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7109 let buffer = project.update(cx, |project, cx| {
7110 project.create_buffer("", None, cx).unwrap()
7111 });
7112 buffer.update(cx, |buffer, cx| {
7113 buffer.edit([(0..0, "abc")], cx);
7114 assert!(buffer.is_dirty());
7115 assert!(!buffer.has_conflict());
7116 });
7117 project
7118 .update(cx, |project, cx| {
7119 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7120 })
7121 .await
7122 .unwrap();
7123 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7124 buffer.read_with(cx, |buffer, cx| {
7125 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7126 assert!(!buffer.is_dirty());
7127 assert!(!buffer.has_conflict());
7128 });
7129
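        // Re-opening the saved path should return the same buffer that was just saved.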
7130 let opened_buffer = project
7131 .update(cx, |project, cx| {
7132 project.open_local_buffer("/dir/file1", cx)
7133 })
7134 .await
7135 .unwrap();
7136 assert_eq!(opened_buffer, buffer);
7137 }
7138
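    // Renames and deletions on disk should be reflected both in the local worktree and in a
    // remote replica of it.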
7139 #[gpui::test(retries = 5)]
7140 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7141 let dir = temp_tree(json!({
7142 "a": {
7143 "file1": "",
7144 "file2": "",
7145 "file3": "",
7146 },
7147 "b": {
7148 "c": {
7149 "file4": "",
7150 "file5": "",
7151 }
7152 }
7153 }));
7154
7155 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7156 let rpc = project.read_with(cx, |p, _| p.client.clone());
7157
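        // Helpers for opening buffers and looking up worktree entry ids by path.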
7158 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7159 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7160 async move { buffer.await.unwrap() }
7161 };
7162 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7163 project.read_with(cx, |project, cx| {
7164 let tree = project.worktrees(cx).next().unwrap();
7165 tree.read(cx)
7166 .entry_for_path(path)
7167                    .unwrap_or_else(|| panic!("no entry for path {}", path))
7168 .id
7169 })
7170 };
7171
7172 let buffer2 = buffer_for_path("a/file2", cx).await;
7173 let buffer3 = buffer_for_path("a/file3", cx).await;
7174 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7175 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7176
7177 let file2_id = id_for_path("a/file2", &cx);
7178 let file3_id = id_for_path("a/file3", &cx);
7179 let file4_id = id_for_path("b/c/file4", &cx);
7180
7181 // Create a remote copy of this worktree.
7182 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7183 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7184 let (remote, load_task) = cx.update(|cx| {
7185 Worktree::remote(
7186 1,
7187 1,
7188 initial_snapshot.to_proto(&Default::default(), true),
7189 rpc.clone(),
7190 cx,
7191 )
7192 });
7193        // Wait for the remote worktree's load task to complete.
7194 load_task.await;
7195
7196 cx.read(|cx| {
7197 assert!(!buffer2.read(cx).is_dirty());
7198 assert!(!buffer3.read(cx).is_dirty());
7199 assert!(!buffer4.read(cx).is_dirty());
7200 assert!(!buffer5.read(cx).is_dirty());
7201 });
7202
7203 // Rename and delete files and directories.
7204 tree.flush_fs_events(&cx).await;
7205 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7206 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7207 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7208 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7209 tree.flush_fs_events(&cx).await;
7210
7211 let expected_paths = vec![
7212 "a",
7213 "a/file1",
7214 "a/file2.new",
7215 "b",
7216 "d",
7217 "d/file3",
7218 "d/file4",
7219 ];
7220
7221 cx.read(|app| {
7222 assert_eq!(
7223 tree.read(app)
7224 .paths()
7225 .map(|p| p.to_str().unwrap())
7226 .collect::<Vec<_>>(),
7227 expected_paths
7228 );
7229
7230 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7231 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7232 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7233
7234 assert_eq!(
7235 buffer2.read(app).file().unwrap().path().as_ref(),
7236 Path::new("a/file2.new")
7237 );
7238 assert_eq!(
7239 buffer3.read(app).file().unwrap().path().as_ref(),
7240 Path::new("d/file3")
7241 );
7242 assert_eq!(
7243 buffer4.read(app).file().unwrap().path().as_ref(),
7244 Path::new("d/file4")
7245 );
7246 assert_eq!(
7247 buffer5.read(app).file().unwrap().path().as_ref(),
7248 Path::new("b/c/file5")
7249 );
7250
7251 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7252 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7253 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7254 assert!(buffer5.read(app).file().unwrap().is_deleted());
7255 });
7256
7257 // Update the remote worktree. Check that it becomes consistent with the
7258 // local worktree.
7259 remote.update(cx, |remote, cx| {
7260 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7261 &initial_snapshot,
7262 1,
7263 1,
7264 true,
7265 );
7266 remote
7267 .as_remote_mut()
7268 .unwrap()
7269 .snapshot
7270 .apply_remote_update(update_message)
7271 .unwrap();
7272
7273 assert_eq!(
7274 remote
7275 .paths()
7276 .map(|p| p.to_str().unwrap())
7277 .collect::<Vec<_>>(),
7278 expected_paths
7279 );
7280 });
7281 }
7282
7283 #[gpui::test]
7284 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7285 let fs = FakeFs::new(cx.background());
7286 fs.insert_tree(
7287 "/dir",
7288 json!({
7289 "a.txt": "a-contents",
7290 "b.txt": "b-contents",
7291 }),
7292 )
7293 .await;
7294
7295 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7296
7297 // Spawn multiple tasks to open paths, repeating some paths.
7298 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7299 (
7300 p.open_local_buffer("/dir/a.txt", cx),
7301 p.open_local_buffer("/dir/b.txt", cx),
7302 p.open_local_buffer("/dir/a.txt", cx),
7303 )
7304 });
7305
7306 let buffer_a_1 = buffer_a_1.await.unwrap();
7307 let buffer_a_2 = buffer_a_2.await.unwrap();
7308 let buffer_b = buffer_b.await.unwrap();
7309 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7310 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7311
7312 // There is only one buffer per path.
7313 let buffer_a_id = buffer_a_1.id();
7314 assert_eq!(buffer_a_2.id(), buffer_a_id);
7315
7316 // Open the same path again while it is still open.
7317 drop(buffer_a_1);
7318 let buffer_a_3 = project
7319 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7320 .await
7321 .unwrap();
7322
7323 // There's still only one buffer per path.
7324 assert_eq!(buffer_a_3.id(), buffer_a_id);
7325 }
7326
7327 #[gpui::test]
7328 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7329 let fs = FakeFs::new(cx.background());
7330 fs.insert_tree(
7331 "/dir",
7332 json!({
7333 "file1": "abc",
7334 "file2": "def",
7335 "file3": "ghi",
7336 }),
7337 )
7338 .await;
7339
7340 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7341
7342 let buffer1 = project
7343 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7344 .await
7345 .unwrap();
7346 let events = Rc::new(RefCell::new(Vec::new()));
7347
7348 // initially, the buffer isn't dirty.
7349 buffer1.update(cx, |buffer, cx| {
7350 cx.subscribe(&buffer1, {
7351 let events = events.clone();
7352 move |_, _, event, _| match event {
7353 BufferEvent::Operation(_) => {}
7354 _ => events.borrow_mut().push(event.clone()),
7355 }
7356 })
7357 .detach();
7358
7359 assert!(!buffer.is_dirty());
7360 assert!(events.borrow().is_empty());
7361
7362 buffer.edit([(1..2, "")], cx);
7363 });
7364
7365 // after the first edit, the buffer is dirty, and emits a dirtied event.
7366 buffer1.update(cx, |buffer, cx| {
7367 assert!(buffer.text() == "ac");
7368 assert!(buffer.is_dirty());
7369 assert_eq!(
7370 *events.borrow(),
7371 &[language::Event::Edited, language::Event::Dirtied]
7372 );
7373 events.borrow_mut().clear();
7374 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7375 });
7376
7377 // after saving, the buffer is not dirty, and emits a saved event.
7378 buffer1.update(cx, |buffer, cx| {
7379 assert!(!buffer.is_dirty());
7380 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7381 events.borrow_mut().clear();
7382
7383 buffer.edit([(1..1, "B")], cx);
7384 buffer.edit([(2..2, "D")], cx);
7385 });
7386
7387 // after editing again, the buffer is dirty, and emits another dirty event.
7388 buffer1.update(cx, |buffer, cx| {
7389 assert!(buffer.text() == "aBDc");
7390 assert!(buffer.is_dirty());
7391 assert_eq!(
7392 *events.borrow(),
7393 &[
7394 language::Event::Edited,
7395 language::Event::Dirtied,
7396 language::Event::Edited,
7397 ],
7398 );
7399 events.borrow_mut().clear();
7400
7401            // TODO - currently, after restoring the buffer to its
7402            // previously-saved state, the buffer is still considered dirty.
7403 buffer.edit([(1..3, "")], cx);
7404 assert!(buffer.text() == "ac");
7405 assert!(buffer.is_dirty());
7406 });
7407
7408 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7409
7410 // When a file is deleted, the buffer is considered dirty.
7411 let events = Rc::new(RefCell::new(Vec::new()));
7412 let buffer2 = project
7413 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7414 .await
7415 .unwrap();
7416 buffer2.update(cx, |_, cx| {
7417 cx.subscribe(&buffer2, {
7418 let events = events.clone();
7419 move |_, _, event, _| events.borrow_mut().push(event.clone())
7420 })
7421 .detach();
7422 });
7423
7424 fs.remove_file("/dir/file2".as_ref(), Default::default())
7425 .await
7426 .unwrap();
7427 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7428 assert_eq!(
7429 *events.borrow(),
7430 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7431 );
7432
7433 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7434 let events = Rc::new(RefCell::new(Vec::new()));
7435 let buffer3 = project
7436 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7437 .await
7438 .unwrap();
7439 buffer3.update(cx, |_, cx| {
7440 cx.subscribe(&buffer3, {
7441 let events = events.clone();
7442 move |_, _, event, _| events.borrow_mut().push(event.clone())
7443 })
7444 .detach();
7445 });
7446
7447 buffer3.update(cx, |buffer, cx| {
7448 buffer.edit([(0..0, "x")], cx);
7449 });
7450 events.borrow_mut().clear();
7451 fs.remove_file("/dir/file3".as_ref(), Default::default())
7452 .await
7453 .unwrap();
7454 buffer3
7455 .condition(&cx, |_, _| !events.borrow().is_empty())
7456 .await;
7457 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7458 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7459 }
7460
7461 #[gpui::test]
7462 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7463 let initial_contents = "aaa\nbbbbb\nc\n";
7464 let fs = FakeFs::new(cx.background());
7465 fs.insert_tree(
7466 "/dir",
7467 json!({
7468 "the-file": initial_contents,
7469 }),
7470 )
7471 .await;
7472 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7473 let buffer = project
7474 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7475 .await
7476 .unwrap();
7477
7478 let anchors = (0..3)
7479 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7480 .collect::<Vec<_>>();
7481
7482 // Change the file on disk, adding two new lines of text, and removing
7483 // one line.
7484 buffer.read_with(cx, |buffer, _| {
7485 assert!(!buffer.is_dirty());
7486 assert!(!buffer.has_conflict());
7487 });
7488 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7489 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7490 .await
7491 .unwrap();
7492
7493 // Because the buffer was not modified, it is reloaded from disk. Its
7494 // contents are edited according to the diff between the old and new
7495 // file contents.
7496 buffer
7497 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7498 .await;
7499
7500 buffer.update(cx, |buffer, _| {
7501 assert_eq!(buffer.text(), new_contents);
7502 assert!(!buffer.is_dirty());
7503 assert!(!buffer.has_conflict());
7504
7505 let anchor_positions = anchors
7506 .iter()
7507 .map(|anchor| anchor.to_point(&*buffer))
7508 .collect::<Vec<_>>();
7509 assert_eq!(
7510 anchor_positions,
7511 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7512 );
7513 });
7514
7515 // Modify the buffer
7516 buffer.update(cx, |buffer, cx| {
7517 buffer.edit([(0..0, " ")], cx);
7518 assert!(buffer.is_dirty());
7519 assert!(!buffer.has_conflict());
7520 });
7521
7522 // Change the file on disk again, adding blank lines to the beginning.
7523 fs.save(
7524 "/dir/the-file".as_ref(),
7525 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7526 )
7527 .await
7528 .unwrap();
7529
7530 // Because the buffer is modified, it doesn't reload from disk, but is
7531 // marked as having a conflict.
7532 buffer
7533 .condition(&cx, |buffer, _| buffer.has_conflict())
7534 .await;
7535 }
7536
7537 #[gpui::test]
7538 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7539 cx.foreground().forbid_parking();
7540
7541 let fs = FakeFs::new(cx.background());
7542 fs.insert_tree(
7543 "/the-dir",
7544 json!({
7545 "a.rs": "
7546 fn foo(mut v: Vec<usize>) {
7547 for x in &v {
7548 v.push(1);
7549 }
7550 }
7551 "
7552 .unindent(),
7553 }),
7554 )
7555 .await;
7556
7557 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7558 let buffer = project
7559 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7560 .await
7561 .unwrap();
7562
7563 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
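        // Publish a set of diagnostics in which hint diagnostics are linked to their primary
        // errors via related information.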
7564 let message = lsp::PublishDiagnosticsParams {
7565 uri: buffer_uri.clone(),
7566 diagnostics: vec![
7567 lsp::Diagnostic {
7568 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7569 severity: Some(DiagnosticSeverity::WARNING),
7570 message: "error 1".to_string(),
7571 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7572 location: lsp::Location {
7573 uri: buffer_uri.clone(),
7574 range: lsp::Range::new(
7575 lsp::Position::new(1, 8),
7576 lsp::Position::new(1, 9),
7577 ),
7578 },
7579 message: "error 1 hint 1".to_string(),
7580 }]),
7581 ..Default::default()
7582 },
7583 lsp::Diagnostic {
7584 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7585 severity: Some(DiagnosticSeverity::HINT),
7586 message: "error 1 hint 1".to_string(),
7587 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7588 location: lsp::Location {
7589 uri: buffer_uri.clone(),
7590 range: lsp::Range::new(
7591 lsp::Position::new(1, 8),
7592 lsp::Position::new(1, 9),
7593 ),
7594 },
7595 message: "original diagnostic".to_string(),
7596 }]),
7597 ..Default::default()
7598 },
7599 lsp::Diagnostic {
7600 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7601 severity: Some(DiagnosticSeverity::ERROR),
7602 message: "error 2".to_string(),
7603 related_information: Some(vec![
7604 lsp::DiagnosticRelatedInformation {
7605 location: lsp::Location {
7606 uri: buffer_uri.clone(),
7607 range: lsp::Range::new(
7608 lsp::Position::new(1, 13),
7609 lsp::Position::new(1, 15),
7610 ),
7611 },
7612 message: "error 2 hint 1".to_string(),
7613 },
7614 lsp::DiagnosticRelatedInformation {
7615 location: lsp::Location {
7616 uri: buffer_uri.clone(),
7617 range: lsp::Range::new(
7618 lsp::Position::new(1, 13),
7619 lsp::Position::new(1, 15),
7620 ),
7621 },
7622 message: "error 2 hint 2".to_string(),
7623 },
7624 ]),
7625 ..Default::default()
7626 },
7627 lsp::Diagnostic {
7628 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7629 severity: Some(DiagnosticSeverity::HINT),
7630 message: "error 2 hint 1".to_string(),
7631 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7632 location: lsp::Location {
7633 uri: buffer_uri.clone(),
7634 range: lsp::Range::new(
7635 lsp::Position::new(2, 8),
7636 lsp::Position::new(2, 17),
7637 ),
7638 },
7639 message: "original diagnostic".to_string(),
7640 }]),
7641 ..Default::default()
7642 },
7643 lsp::Diagnostic {
7644 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7645 severity: Some(DiagnosticSeverity::HINT),
7646 message: "error 2 hint 2".to_string(),
7647 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7648 location: lsp::Location {
7649 uri: buffer_uri.clone(),
7650 range: lsp::Range::new(
7651 lsp::Position::new(2, 8),
7652 lsp::Position::new(2, 17),
7653 ),
7654 },
7655 message: "original diagnostic".to_string(),
7656 }]),
7657 ..Default::default()
7658 },
7659 ],
7660 version: None,
7661 };
7662
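        // Ingest the LSP diagnostics into the project, then snapshot the buffer.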
7663 project
7664 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7665 .unwrap();
7666 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7667
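        // Diagnostics come back in buffer order: the warning and its hint share
        // group 0, while the error and its two hints share group 1.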
7668 assert_eq!(
7669 buffer
7670 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7671 .collect::<Vec<_>>(),
7672 &[
7673 DiagnosticEntry {
7674 range: Point::new(1, 8)..Point::new(1, 9),
7675 diagnostic: Diagnostic {
7676 severity: DiagnosticSeverity::WARNING,
7677 message: "error 1".to_string(),
7678 group_id: 0,
7679 is_primary: true,
7680 ..Default::default()
7681 }
7682 },
7683 DiagnosticEntry {
7684 range: Point::new(1, 8)..Point::new(1, 9),
7685 diagnostic: Diagnostic {
7686 severity: DiagnosticSeverity::HINT,
7687 message: "error 1 hint 1".to_string(),
7688 group_id: 0,
7689 is_primary: false,
7690 ..Default::default()
7691 }
7692 },
7693 DiagnosticEntry {
7694 range: Point::new(1, 13)..Point::new(1, 15),
7695 diagnostic: Diagnostic {
7696 severity: DiagnosticSeverity::HINT,
7697 message: "error 2 hint 1".to_string(),
7698 group_id: 1,
7699 is_primary: false,
7700 ..Default::default()
7701 }
7702 },
7703 DiagnosticEntry {
7704 range: Point::new(1, 13)..Point::new(1, 15),
7705 diagnostic: Diagnostic {
7706 severity: DiagnosticSeverity::HINT,
7707 message: "error 2 hint 2".to_string(),
7708 group_id: 1,
7709 is_primary: false,
7710 ..Default::default()
7711 }
7712 },
7713 DiagnosticEntry {
7714 range: Point::new(2, 8)..Point::new(2, 17),
7715 diagnostic: Diagnostic {
7716 severity: DiagnosticSeverity::ERROR,
7717 message: "error 2".to_string(),
7718 group_id: 1,
7719 is_primary: true,
7720 ..Default::default()
7721 }
7722 }
7723 ]
7724 );
7725
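        // Each group can also be fetched by id, returning the primary diagnostic
        // together with its hints.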
7726 assert_eq!(
7727 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7728 &[
7729 DiagnosticEntry {
7730 range: Point::new(1, 8)..Point::new(1, 9),
7731 diagnostic: Diagnostic {
7732 severity: DiagnosticSeverity::WARNING,
7733 message: "error 1".to_string(),
7734 group_id: 0,
7735 is_primary: true,
7736 ..Default::default()
7737 }
7738 },
7739 DiagnosticEntry {
7740 range: Point::new(1, 8)..Point::new(1, 9),
7741 diagnostic: Diagnostic {
7742 severity: DiagnosticSeverity::HINT,
7743 message: "error 1 hint 1".to_string(),
7744 group_id: 0,
7745 is_primary: false,
7746 ..Default::default()
7747 }
7748 },
7749 ]
7750 );
7751 assert_eq!(
7752 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7753 &[
7754 DiagnosticEntry {
7755 range: Point::new(1, 13)..Point::new(1, 15),
7756 diagnostic: Diagnostic {
7757 severity: DiagnosticSeverity::HINT,
7758 message: "error 2 hint 1".to_string(),
7759 group_id: 1,
7760 is_primary: false,
7761 ..Default::default()
7762 }
7763 },
7764 DiagnosticEntry {
7765 range: Point::new(1, 13)..Point::new(1, 15),
7766 diagnostic: Diagnostic {
7767 severity: DiagnosticSeverity::HINT,
7768 message: "error 2 hint 2".to_string(),
7769 group_id: 1,
7770 is_primary: false,
7771 ..Default::default()
7772 }
7773 },
7774 DiagnosticEntry {
7775 range: Point::new(2, 8)..Point::new(2, 17),
7776 diagnostic: Diagnostic {
7777 severity: DiagnosticSeverity::ERROR,
7778 message: "error 2".to_string(),
7779 group_id: 1,
7780 is_primary: true,
7781 ..Default::default()
7782 }
7783 }
7784 ]
7785 );
7786 }
7787
7788 #[gpui::test]
7789 async fn test_rename(cx: &mut gpui::TestAppContext) {
7790 cx.foreground().forbid_parking();
7791
7792 let mut language = Language::new(
7793 LanguageConfig {
7794 name: "Rust".into(),
7795 path_suffixes: vec!["rs".to_string()],
7796 ..Default::default()
7797 },
7798 Some(tree_sitter_rust::language()),
7799 );
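        // Register a fake language server that advertises rename support,
        // including `prepareRename`.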
7800 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7801 capabilities: lsp::ServerCapabilities {
7802 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7803 prepare_provider: Some(true),
7804 work_done_progress_options: Default::default(),
7805 })),
7806 ..Default::default()
7807 },
7808 ..Default::default()
7809 });
7810
7811 let fs = FakeFs::new(cx.background());
7812 fs.insert_tree(
7813 "/dir",
7814 json!({
7815 "one.rs": "const ONE: usize = 1;",
7816 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7817 }),
7818 )
7819 .await;
7820
7821 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7822 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7823 let buffer = project
7824 .update(cx, |project, cx| {
7825 project.open_local_buffer("/dir/one.rs", cx)
7826 })
7827 .await
7828 .unwrap();
7829
7830 let fake_server = fake_servers.next().await.unwrap();
7831
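        // Prepare a rename at offset 7 (inside `ONE`); the fake server responds
        // with the full range of the symbol.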
7832 let response = project.update(cx, |project, cx| {
7833 project.prepare_rename(buffer.clone(), 7, cx)
7834 });
7835 fake_server
7836 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7837 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7838 assert_eq!(params.position, lsp::Position::new(0, 7));
7839 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7840 lsp::Position::new(0, 6),
7841 lsp::Position::new(0, 9),
7842 ))))
7843 })
7844 .next()
7845 .await
7846 .unwrap();
7847 let range = response.await.unwrap().unwrap();
7848 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7849 assert_eq!(range, 6..9);
7850
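        // Perform the rename. The fake server replies with a workspace edit that
        // touches both one.rs and two.rs.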
7851 let response = project.update(cx, |project, cx| {
7852 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7853 });
7854 fake_server
7855 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7856 assert_eq!(
7857 params.text_document_position.text_document.uri.as_str(),
7858 "file:///dir/one.rs"
7859 );
7860 assert_eq!(
7861 params.text_document_position.position,
7862 lsp::Position::new(0, 7)
7863 );
7864 assert_eq!(params.new_name, "THREE");
7865 Ok(Some(lsp::WorkspaceEdit {
7866 changes: Some(
7867 [
7868 (
7869 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7870 vec![lsp::TextEdit::new(
7871 lsp::Range::new(
7872 lsp::Position::new(0, 6),
7873 lsp::Position::new(0, 9),
7874 ),
7875 "THREE".to_string(),
7876 )],
7877 ),
7878 (
7879 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7880 vec![
7881 lsp::TextEdit::new(
7882 lsp::Range::new(
7883 lsp::Position::new(0, 24),
7884 lsp::Position::new(0, 27),
7885 ),
7886 "THREE".to_string(),
7887 ),
7888 lsp::TextEdit::new(
7889 lsp::Range::new(
7890 lsp::Position::new(0, 35),
7891 lsp::Position::new(0, 38),
7892 ),
7893 "THREE".to_string(),
7894 ),
7895 ],
7896 ),
7897 ]
7898 .into_iter()
7899 .collect(),
7900 ),
7901 ..Default::default()
7902 }))
7903 })
7904 .next()
7905 .await
7906 .unwrap();
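        // The resulting project transaction contains both buffers, each with
        // `ONE` renamed to `THREE`.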
7907 let mut transaction = response.await.unwrap().0;
7908 assert_eq!(transaction.len(), 2);
7909 assert_eq!(
7910 transaction
7911 .remove_entry(&buffer)
7912 .unwrap()
7913 .0
7914 .read_with(cx, |buffer, _| buffer.text()),
7915 "const THREE: usize = 1;"
7916 );
7917 assert_eq!(
7918 transaction
7919 .into_keys()
7920 .next()
7921 .unwrap()
7922 .read_with(cx, |buffer, _| buffer.text()),
7923 "const TWO: usize = one::THREE + one::THREE;"
7924 );
7925 }
7926
7927 #[gpui::test]
7928 async fn test_search(cx: &mut gpui::TestAppContext) {
7929 let fs = FakeFs::new(cx.background());
7930 fs.insert_tree(
7931 "/dir",
7932 json!({
7933 "one.rs": "const ONE: usize = 1;",
7934 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7935 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7936 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7937 }),
7938 )
7939 .await;
7940 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
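        // A project-wide search finds matches in files that haven't been opened.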
7941 assert_eq!(
7942 search(&project, SearchQuery::text("TWO", false, true), cx)
7943 .await
7944 .unwrap(),
7945 HashMap::from_iter([
7946 ("two.rs".to_string(), vec![6..9]),
7947 ("three.rs".to_string(), vec![37..40])
7948 ])
7949 );
7950
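        // Open one of the files and edit it in memory without saving, so its
        // contents diverge from what's on disk.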
7951 let buffer_4 = project
7952 .update(cx, |project, cx| {
7953 project.open_local_buffer("/dir/four.rs", cx)
7954 })
7955 .await
7956 .unwrap();
7957 buffer_4.update(cx, |buffer, cx| {
7958 let text = "two::TWO";
7959 buffer.edit([(20..28, text), (31..43, text)], cx);
7960 });
7961
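        // The search now reflects the unsaved edits in the open buffer.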
7962 assert_eq!(
7963 search(&project, SearchQuery::text("TWO", false, true), cx)
7964 .await
7965 .unwrap(),
7966 HashMap::from_iter([
7967 ("two.rs".to_string(), vec![6..9]),
7968 ("three.rs".to_string(), vec![37..40]),
7969 ("four.rs".to_string(), vec![25..28, 36..39])
7970 ])
7971 );
7972
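        // Runs a project-wide search and flattens the results into a map from
        // file path to matching offset ranges, for ease of assertion.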
7973 async fn search(
7974 project: &ModelHandle<Project>,
7975 query: SearchQuery,
7976 cx: &mut gpui::TestAppContext,
7977 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7978 let results = project
7979 .update(cx, |project, cx| project.search(query, cx))
7980 .await?;
7981
7982 Ok(results
7983 .into_iter()
7984 .map(|(buffer, ranges)| {
7985 buffer.read_with(cx, |buffer, _| {
7986 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7987 let ranges = ranges
7988 .into_iter()
7989 .map(|range| range.to_offset(buffer))
7990 .collect::<Vec<_>>();
7991 (path, ranges)
7992 })
7993 })
7994 .collect())
7995 }
7996 }
7997}