1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
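/// A model that can be associated with a project entry, such as a buffer opened from a worktree file.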
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
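/// The in-memory representation of a project: a set of worktrees along with the open buffers,
/// language servers, diagnostics, and collaborators associated with them.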
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
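/// The ways in which joining a remote project can fail.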
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
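/// How the project holds an open buffer: strongly while the project is shared or remote,
/// weakly otherwise, or as a queue of operations received before the buffer finished loading.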
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
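/// Like `OpenBuffer`, a worktree may be held strongly (keeping it alive for guests while the
/// project is shared) or weakly.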
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
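/// Per-project client state, which differs depending on whether this replica is the local host
/// or a guest that joined the project remotely.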
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
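/// Another user participating in this project, along with the replica id that identifies their edits.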
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
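/// Events emitted by a `Project` for observers such as the workspace UI.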
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeAdded,
143 WorktreeRemoved(WorktreeId),
144 DiskBasedDiagnosticsStarted,
145 DiskBasedDiagnosticsUpdated,
146 DiskBasedDiagnosticsFinished,
147 DiagnosticsUpdated(ProjectPath),
148 RemoteIdChanged(Option<u64>),
149 CollaboratorLeft(PeerId),
150 ContactRequestedJoin(Arc<User>),
151 ContactCancelledJoinRequest(Arc<User>),
152}
153
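/// A summary of a running language server's activity, used to drive status and progress UI.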
154#[derive(Serialize)]
155pub struct LanguageServerStatus {
156 pub name: String,
157 pub pending_work: BTreeMap<String, LanguageServerProgress>,
158 pub pending_diagnostic_updates: isize,
159}
160
161#[derive(Clone, Debug, Serialize)]
162pub struct LanguageServerProgress {
163 pub message: Option<String>,
164 pub percentage: Option<usize>,
165 #[serde(skip_serializing)]
166 pub last_update_at: Instant,
167}
168
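/// A path to an entry, expressed relative to the worktree that contains it.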
169#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
170pub struct ProjectPath {
171 pub worktree_id: WorktreeId,
172 pub path: Arc<Path>,
173}
174
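/// Counts of primary error and warning diagnostics for a single path.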
175#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
176pub struct DiagnosticSummary {
177 pub error_count: usize,
178 pub warning_count: usize,
179}
180
181#[derive(Debug)]
182pub struct Location {
183 pub buffer: ModelHandle<Buffer>,
184 pub range: Range<language::Anchor>,
185}
186
187#[derive(Debug)]
188pub struct DocumentHighlight {
189 pub range: Range<language::Anchor>,
190 pub kind: DocumentHighlightKind,
191}
192
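/// A workspace symbol reported by a language server. `source_worktree_id` identifies the worktree
/// whose server produced the symbol, which may differ from the worktree containing `path`.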
193#[derive(Clone, Debug)]
194pub struct Symbol {
195 pub source_worktree_id: WorktreeId,
196 pub worktree_id: WorktreeId,
197 pub language_server_name: LanguageServerName,
198 pub path: PathBuf,
199 pub label: CodeLabel,
200 pub name: String,
201 pub kind: lsp::SymbolKind,
202 pub range: Range<PointUtf16>,
203 pub signature: [u8; 32],
204}
205
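/// The buffer transactions produced by an operation that can touch multiple buffers at once,
/// such as applying a workspace edit.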
206#[derive(Default)]
207pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
208
209impl DiagnosticSummary {
210 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
211 let mut this = Self {
212 error_count: 0,
213 warning_count: 0,
214 };
215
216 for entry in diagnostics {
217 if entry.diagnostic.is_primary {
218 match entry.diagnostic.severity {
219 DiagnosticSeverity::ERROR => this.error_count += 1,
220 DiagnosticSeverity::WARNING => this.warning_count += 1,
221 _ => {}
222 }
223 }
224 }
225
226 this
227 }
228
229 pub fn is_empty(&self) -> bool {
230 self.error_count == 0 && self.warning_count == 0
231 }
232
233 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
234 proto::DiagnosticSummary {
235 path: path.to_string_lossy().to_string(),
236 error_count: self.error_count as u32,
237 warning_count: self.warning_count as u32,
238 }
239 }
240}
241
242#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
243pub struct ProjectEntryId(usize);
244
245impl ProjectEntryId {
246 pub const MAX: Self = Self(usize::MAX);
247
248 pub fn new(counter: &AtomicUsize) -> Self {
249 Self(counter.fetch_add(1, SeqCst))
250 }
251
252 pub fn from_proto(id: u64) -> Self {
253 Self(id as usize)
254 }
255
256 pub fn to_proto(&self) -> u64 {
257 self.0 as u64
258 }
259
260 pub fn to_usize(&self) -> usize {
261 self.0
262 }
263}
264
265impl Project {
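    /// Registers the RPC message and request handlers that allow projects to respond to
    /// collaboration traffic from the server.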
266 pub fn init(client: &Arc<Client>) {
267 client.add_model_message_handler(Self::handle_request_join_project);
268 client.add_model_message_handler(Self::handle_add_collaborator);
269 client.add_model_message_handler(Self::handle_buffer_reloaded);
270 client.add_model_message_handler(Self::handle_buffer_saved);
271 client.add_model_message_handler(Self::handle_start_language_server);
272 client.add_model_message_handler(Self::handle_update_language_server);
273 client.add_model_message_handler(Self::handle_remove_collaborator);
274 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
275 client.add_model_message_handler(Self::handle_register_worktree);
276 client.add_model_message_handler(Self::handle_unregister_worktree);
277 client.add_model_message_handler(Self::handle_unregister_project);
278 client.add_model_message_handler(Self::handle_project_unshared);
279 client.add_model_message_handler(Self::handle_update_buffer_file);
280 client.add_model_message_handler(Self::handle_update_buffer);
281 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
282 client.add_model_message_handler(Self::handle_update_worktree);
283 client.add_model_request_handler(Self::handle_create_project_entry);
284 client.add_model_request_handler(Self::handle_rename_project_entry);
285 client.add_model_request_handler(Self::handle_copy_project_entry);
286 client.add_model_request_handler(Self::handle_delete_project_entry);
287 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
288 client.add_model_request_handler(Self::handle_apply_code_action);
289 client.add_model_request_handler(Self::handle_reload_buffers);
290 client.add_model_request_handler(Self::handle_format_buffers);
291 client.add_model_request_handler(Self::handle_get_code_actions);
292 client.add_model_request_handler(Self::handle_get_completions);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
295 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
296 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
297 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
298 client.add_model_request_handler(Self::handle_search_project);
299 client.add_model_request_handler(Self::handle_get_project_symbols);
300 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
301 client.add_model_request_handler(Self::handle_open_buffer_by_id);
302 client.add_model_request_handler(Self::handle_open_buffer_by_path);
303 client.add_model_request_handler(Self::handle_save_buffer);
304 }
305
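    /// Creates a project hosted on this machine, spawning a background task that registers it
    /// with the server whenever the client is connected.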
306 pub fn local(
307 client: Arc<Client>,
308 user_store: ModelHandle<UserStore>,
309 languages: Arc<LanguageRegistry>,
310 fs: Arc<dyn Fs>,
311 cx: &mut MutableAppContext,
312 ) -> ModelHandle<Self> {
313 cx.add_model(|cx: &mut ModelContext<Self>| {
314 let (remote_id_tx, remote_id_rx) = watch::channel();
315 let _maintain_remote_id_task = cx.spawn_weak({
316 let rpc = client.clone();
317 move |this, mut cx| {
318 async move {
319 let mut status = rpc.status();
320 while let Some(status) = status.next().await {
321 if let Some(this) = this.upgrade(&cx) {
322 if status.is_connected() {
323 this.update(&mut cx, |this, cx| this.register(cx)).await?;
324 } else {
325 this.update(&mut cx, |this, cx| this.unregister(cx));
326 }
327 }
328 }
329 Ok(())
330 }
331 .log_err()
332 }
333 });
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 Self {
337 worktrees: Default::default(),
338 collaborators: Default::default(),
339 opened_buffers: Default::default(),
340 shared_buffers: Default::default(),
341 loading_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 buffer_snapshots: Default::default(),
344 client_state: ProjectClientState::Local {
345 is_shared: false,
346 remote_id_tx,
347 remote_id_rx,
348 _maintain_remote_id_task,
349 },
350 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
351 subscriptions: Vec::new(),
352 active_entry: None,
353 languages,
354 client,
355 user_store,
356 fs,
357 next_entry_id: Default::default(),
358 language_servers: Default::default(),
359 started_language_servers: Default::default(),
360 language_server_statuses: Default::default(),
361 last_workspace_edits_by_language_server: Default::default(),
362 language_server_settings: Default::default(),
363 next_language_server_id: 0,
364 nonce: StdRng::from_entropy().gen(),
365 }
366 })
367 }
368
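    /// Joins a project hosted by another user, fetching its worktree metadata, language server
    /// statuses, and collaborator list.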
369 pub async fn remote(
370 remote_id: u64,
371 client: Arc<Client>,
372 user_store: ModelHandle<UserStore>,
373 languages: Arc<LanguageRegistry>,
374 fs: Arc<dyn Fs>,
375 cx: &mut AsyncAppContext,
376 ) -> Result<ModelHandle<Self>, JoinProjectError> {
377 client.authenticate_and_connect(true, &cx).await?;
378
379 let response = client
380 .request(proto::JoinProject {
381 project_id: remote_id,
382 })
383 .await?;
384
385 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
386 proto::join_project_response::Variant::Accept(response) => response,
387 proto::join_project_response::Variant::Decline(decline) => {
388 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
389 Some(proto::join_project_response::decline::Reason::Declined) => {
390 Err(JoinProjectError::HostDeclined)?
391 }
392 Some(proto::join_project_response::decline::Reason::Closed) => {
393 Err(JoinProjectError::HostClosedProject)?
394 }
395 Some(proto::join_project_response::decline::Reason::WentOffline) => {
396 Err(JoinProjectError::HostWentOffline)?
397 }
398 None => Err(anyhow!("missing decline reason"))?,
399 }
400 }
401 };
402
403 let replica_id = response.replica_id as ReplicaId;
404
405 let mut worktrees = Vec::new();
406 for worktree in response.worktrees {
407 let (worktree, load_task) = cx
408 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
409 worktrees.push(worktree);
410 load_task.detach();
411 }
412
413 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
414 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
415 let mut this = Self {
416 worktrees: Vec::new(),
417 loading_buffers: Default::default(),
418 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
419 shared_buffers: Default::default(),
420 loading_local_worktrees: Default::default(),
421 active_entry: None,
422 collaborators: Default::default(),
423 languages,
424 user_store: user_store.clone(),
425 fs,
426 next_entry_id: Default::default(),
427 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
428 client: client.clone(),
429 client_state: ProjectClientState::Remote {
430 sharing_has_stopped: false,
431 remote_id,
432 replica_id,
433 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
434 async move {
435 let mut status = client.status();
436 let is_connected =
437 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we were momentarily disconnected.
439 if !is_connected || status.next().await.is_some() {
440 if let Some(this) = this.upgrade(&cx) {
441 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
442 }
443 }
444 Ok(())
445 }
446 .log_err()
447 }),
448 },
449 language_servers: Default::default(),
450 started_language_servers: Default::default(),
451 language_server_settings: Default::default(),
452 language_server_statuses: response
453 .language_servers
454 .into_iter()
455 .map(|server| {
456 (
457 server.id as usize,
458 LanguageServerStatus {
459 name: server.name,
460 pending_work: Default::default(),
461 pending_diagnostic_updates: 0,
462 },
463 )
464 })
465 .collect(),
466 last_workspace_edits_by_language_server: Default::default(),
467 next_language_server_id: 0,
468 opened_buffers: Default::default(),
469 buffer_snapshots: Default::default(),
470 nonce: StdRng::from_entropy().gen(),
471 };
472 for worktree in worktrees {
473 this.add_worktree(&worktree, cx);
474 }
475 this
476 });
477
478 let user_ids = response
479 .collaborators
480 .iter()
481 .map(|peer| peer.user_id)
482 .collect();
483 user_store
484 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
485 .await?;
486 let mut collaborators = HashMap::default();
487 for message in response.collaborators {
488 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
489 collaborators.insert(collaborator.peer_id, collaborator);
490 }
491
492 this.update(cx, |this, _| {
493 this.collaborators = collaborators;
494 });
495
496 Ok(this)
497 }
498
499 #[cfg(any(test, feature = "test-support"))]
500 pub async fn test(
501 fs: Arc<dyn Fs>,
502 root_paths: impl IntoIterator<Item = &Path>,
503 cx: &mut gpui::TestAppContext,
504 ) -> ModelHandle<Project> {
505 let languages = Arc::new(LanguageRegistry::test());
506 let http_client = client::test::FakeHttpClient::with_404_response();
507 let client = client::Client::new(http_client.clone());
508 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
509 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
510 for path in root_paths {
511 let (tree, _) = project
512 .update(cx, |project, cx| {
513 project.find_or_create_local_worktree(path, true, cx)
514 })
515 .await
516 .unwrap();
517 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
518 .await;
519 }
520 project
521 }
522
523 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
524 self.opened_buffers
525 .get(&remote_id)
526 .and_then(|buffer| buffer.upgrade(cx))
527 }
528
529 pub fn languages(&self) -> &Arc<LanguageRegistry> {
530 &self.languages
531 }
532
533 pub fn client(&self) -> Arc<Client> {
534 self.client.clone()
535 }
536
537 pub fn user_store(&self) -> ModelHandle<UserStore> {
538 self.user_store.clone()
539 }
540
541 #[cfg(any(test, feature = "test-support"))]
542 pub fn check_invariants(&self, cx: &AppContext) {
543 if self.is_local() {
544 let mut worktree_root_paths = HashMap::default();
545 for worktree in self.worktrees(cx) {
546 let worktree = worktree.read(cx);
547 let abs_path = worktree.as_local().unwrap().abs_path().clone();
548 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
549 assert_eq!(
550 prev_worktree_id,
551 None,
552 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
553 abs_path,
554 worktree.id(),
555 prev_worktree_id
556 )
557 }
558 } else {
559 let replica_id = self.replica_id();
560 for buffer in self.opened_buffers.values() {
561 if let Some(buffer) = buffer.upgrade(cx) {
562 let buffer = buffer.read(cx);
563 assert_eq!(
564 buffer.deferred_ops_len(),
565 0,
566 "replica {}, buffer {} has deferred operations",
567 replica_id,
568 buffer.remote_id()
569 );
570 }
571 }
572 }
573 }
574
575 #[cfg(any(test, feature = "test-support"))]
576 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
577 let path = path.into();
578 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
579 self.opened_buffers.iter().any(|(_, buffer)| {
580 if let Some(buffer) = buffer.upgrade(cx) {
581 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
582 if file.worktree == worktree && file.path() == &path.path {
583 return true;
584 }
585 }
586 }
587 false
588 })
589 } else {
590 false
591 }
592 }
593
594 pub fn fs(&self) -> &Arc<dyn Fs> {
595 &self.fs
596 }
597
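    /// Removes the project's registration with the server: unshares it, unregisters each local
    /// worktree, and clears the remote id and server subscriptions.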
598 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
599 self.unshared(cx);
600 for worktree in &self.worktrees {
601 if let Some(worktree) = worktree.upgrade(cx) {
602 worktree.update(cx, |worktree, _| {
603 worktree.as_local_mut().unwrap().unregister();
604 });
605 }
606 }
607
608 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
609 *remote_id_tx.borrow_mut() = None;
610 }
611
612 self.subscriptions.clear();
613 }
614
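    /// Registers the project with the server, recording the newly assigned remote id and
    /// registering each local worktree under it.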
615 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
616 self.unregister(cx);
617
618 let response = self.client.request(proto::RegisterProject {});
619 cx.spawn(|this, mut cx| async move {
620 let remote_id = response.await?.project_id;
621
622 let mut registrations = Vec::new();
623 this.update(&mut cx, |this, cx| {
624 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
625 *remote_id_tx.borrow_mut() = Some(remote_id);
626 }
627
628 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
629
630 this.subscriptions
631 .push(this.client.add_model_for_remote_entity(remote_id, cx));
632
633 for worktree in &this.worktrees {
634 if let Some(worktree) = worktree.upgrade(cx) {
635 registrations.push(worktree.update(cx, |worktree, cx| {
636 let worktree = worktree.as_local_mut().unwrap();
637 worktree.register(remote_id, cx)
638 }));
639 }
640 }
641 });
642
643 futures::future::try_join_all(registrations).await?;
644 Ok(())
645 })
646 }
647
648 pub fn remote_id(&self) -> Option<u64> {
649 match &self.client_state {
650 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
651 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
652 }
653 }
654
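    /// Resolves to the project's remote id, waiting for registration to complete if it hasn't yet.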
655 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
656 let mut id = None;
657 let mut watch = None;
658 match &self.client_state {
659 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
660 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
661 }
662
663 async move {
664 if let Some(id) = id {
665 return id;
666 }
667 let mut watch = watch.unwrap();
668 loop {
669 let id = *watch.borrow();
670 if let Some(id) = id {
671 return id;
672 }
673 watch.next().await;
674 }
675 }
676 }
677
678 pub fn replica_id(&self) -> ReplicaId {
679 match &self.client_state {
680 ProjectClientState::Local { .. } => 0,
681 ProjectClientState::Remote { replica_id, .. } => *replica_id,
682 }
683 }
684
685 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
686 &self.collaborators
687 }
688
689 pub fn worktrees<'a>(
690 &'a self,
691 cx: &'a AppContext,
692 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
693 self.worktrees
694 .iter()
695 .filter_map(move |worktree| worktree.upgrade(cx))
696 }
697
698 pub fn visible_worktrees<'a>(
699 &'a self,
700 cx: &'a AppContext,
701 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
702 self.worktrees.iter().filter_map(|worktree| {
703 worktree.upgrade(cx).and_then(|worktree| {
704 if worktree.read(cx).is_visible() {
705 Some(worktree)
706 } else {
707 None
708 }
709 })
710 })
711 }
712
713 pub fn worktree_for_id(
714 &self,
715 id: WorktreeId,
716 cx: &AppContext,
717 ) -> Option<ModelHandle<Worktree>> {
718 self.worktrees(cx)
719 .find(|worktree| worktree.read(cx).id() == id)
720 }
721
722 pub fn worktree_for_entry(
723 &self,
724 entry_id: ProjectEntryId,
725 cx: &AppContext,
726 ) -> Option<ModelHandle<Worktree>> {
727 self.worktrees(cx)
728 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
729 }
730
731 pub fn worktree_id_for_entry(
732 &self,
733 entry_id: ProjectEntryId,
734 cx: &AppContext,
735 ) -> Option<WorktreeId> {
736 self.worktree_for_entry(entry_id, cx)
737 .map(|worktree| worktree.read(cx).id())
738 }
739
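    /// Creates a file or directory at the given project path, either directly on disk for local
    /// projects or by asking the host over RPC for remote ones.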
740 pub fn create_entry(
741 &mut self,
742 project_path: impl Into<ProjectPath>,
743 is_directory: bool,
744 cx: &mut ModelContext<Self>,
745 ) -> Option<Task<Result<Entry>>> {
746 let project_path = project_path.into();
747 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
748 if self.is_local() {
749 Some(worktree.update(cx, |worktree, cx| {
750 worktree
751 .as_local_mut()
752 .unwrap()
753 .create_entry(project_path.path, is_directory, cx)
754 }))
755 } else {
756 let client = self.client.clone();
757 let project_id = self.remote_id().unwrap();
758 Some(cx.spawn_weak(|_, mut cx| async move {
759 let response = client
760 .request(proto::CreateProjectEntry {
761 worktree_id: project_path.worktree_id.to_proto(),
762 project_id,
763 path: project_path.path.as_os_str().as_bytes().to_vec(),
764 is_directory,
765 })
766 .await?;
767 let entry = response
768 .entry
769 .ok_or_else(|| anyhow!("missing entry in response"))?;
770 worktree
771 .update(&mut cx, |worktree, cx| {
772 worktree.as_remote().unwrap().insert_entry(
773 entry,
774 response.worktree_scan_id as usize,
775 cx,
776 )
777 })
778 .await
779 }))
780 }
781 }
782
783 pub fn copy_entry(
784 &mut self,
785 entry_id: ProjectEntryId,
786 new_path: impl Into<Arc<Path>>,
787 cx: &mut ModelContext<Self>,
788 ) -> Option<Task<Result<(Entry, Vec<Entry>)>>> {
789 let worktree = self.worktree_for_entry(entry_id, cx)?;
790 let new_path = new_path.into();
791 if self.is_local() {
792 worktree.update(cx, |worktree, cx| {
793 worktree
794 .as_local_mut()
795 .unwrap()
796 .copy_entry(entry_id, new_path, cx)
797 })
798 } else {
799 let client = self.client.clone();
800 let project_id = self.remote_id().unwrap();
801
802 Some(cx.spawn_weak(|_, mut cx| async move {
803 let response = client
804 .request(proto::CopyProjectEntry {
805 project_id,
806 entry_id: entry_id.to_proto(),
807 new_path: new_path.as_os_str().as_bytes().to_vec(),
808 })
809 .await?;
810 let entry = response
811 .entry
812 .ok_or_else(|| anyhow!("missing entry in response"))?;
813 let (entry, child_entries) = worktree.update(&mut cx, |worktree, cx| {
814 let worktree = worktree.as_remote().unwrap();
815 let root_entry =
816 worktree.insert_entry(entry, response.worktree_scan_id as usize, cx);
817 let mut child_entries = Vec::new();
818 for entry in response.child_entries {
819 child_entries.push(worktree.insert_entry(
820 entry,
821 response.worktree_scan_id as usize,
822 cx,
823 ));
824 }
825 (root_entry, child_entries)
826 });
827 Ok((
828 entry.await?,
829 futures::future::try_join_all(child_entries).await?,
830 ))
831 }))
832 }
833 }
834
835 pub fn rename_entry(
836 &mut self,
837 entry_id: ProjectEntryId,
838 new_path: impl Into<Arc<Path>>,
839 cx: &mut ModelContext<Self>,
840 ) -> Option<Task<Result<Entry>>> {
841 let worktree = self.worktree_for_entry(entry_id, cx)?;
842 let new_path = new_path.into();
843 if self.is_local() {
844 worktree.update(cx, |worktree, cx| {
845 worktree
846 .as_local_mut()
847 .unwrap()
848 .rename_entry(entry_id, new_path, cx)
849 })
850 } else {
851 let client = self.client.clone();
852 let project_id = self.remote_id().unwrap();
853
854 Some(cx.spawn_weak(|_, mut cx| async move {
855 let response = client
856 .request(proto::RenameProjectEntry {
857 project_id,
858 entry_id: entry_id.to_proto(),
859 new_path: new_path.as_os_str().as_bytes().to_vec(),
860 })
861 .await?;
862 let entry = response
863 .entry
864 .ok_or_else(|| anyhow!("missing entry in response"))?;
865 worktree
866 .update(&mut cx, |worktree, cx| {
867 worktree.as_remote().unwrap().insert_entry(
868 entry,
869 response.worktree_scan_id as usize,
870 cx,
871 )
872 })
873 .await
874 }))
875 }
876 }
877
878 pub fn delete_entry(
879 &mut self,
880 entry_id: ProjectEntryId,
881 cx: &mut ModelContext<Self>,
882 ) -> Option<Task<Result<()>>> {
883 let worktree = self.worktree_for_entry(entry_id, cx)?;
884 if self.is_local() {
885 worktree.update(cx, |worktree, cx| {
886 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
887 })
888 } else {
889 let client = self.client.clone();
890 let project_id = self.remote_id().unwrap();
891 Some(cx.spawn_weak(|_, mut cx| async move {
892 let response = client
893 .request(proto::DeleteProjectEntry {
894 project_id,
895 entry_id: entry_id.to_proto(),
896 })
897 .await?;
898 worktree
899 .update(&mut cx, move |worktree, cx| {
900 worktree.as_remote().unwrap().delete_entry(
901 entry_id,
902 response.worktree_scan_id as usize,
903 cx,
904 )
905 })
906 .await
907 }))
908 }
909 }
910
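    /// Makes the project available to guests: upgrades buffer and worktree handles to strong
    /// references and shares each worktree under the project's remote id.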
911 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
912 let project_id;
913 if let ProjectClientState::Local {
914 remote_id_rx,
915 is_shared,
916 ..
917 } = &mut self.client_state
918 {
919 if *is_shared {
920 return Task::ready(Ok(()));
921 }
922 *is_shared = true;
923 if let Some(id) = *remote_id_rx.borrow() {
924 project_id = id;
925 } else {
926 return Task::ready(Err(anyhow!("project hasn't been registered")));
927 }
928 } else {
929 return Task::ready(Err(anyhow!("can't share a remote project")));
930 };
931
932 for open_buffer in self.opened_buffers.values_mut() {
933 match open_buffer {
934 OpenBuffer::Strong(_) => {}
935 OpenBuffer::Weak(buffer) => {
936 if let Some(buffer) = buffer.upgrade(cx) {
937 *open_buffer = OpenBuffer::Strong(buffer);
938 }
939 }
940 OpenBuffer::Loading(_) => unreachable!(),
941 }
942 }
943
944 for worktree_handle in self.worktrees.iter_mut() {
945 match worktree_handle {
946 WorktreeHandle::Strong(_) => {}
947 WorktreeHandle::Weak(worktree) => {
948 if let Some(worktree) = worktree.upgrade(cx) {
949 *worktree_handle = WorktreeHandle::Strong(worktree);
950 }
951 }
952 }
953 }
954
955 let mut tasks = Vec::new();
956 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
957 worktree.update(cx, |worktree, cx| {
958 let worktree = worktree.as_local_mut().unwrap();
959 tasks.push(worktree.share(project_id, cx));
960 });
961 }
962
963 cx.spawn(|this, mut cx| async move {
964 for task in tasks {
965 task.await?;
966 }
967 this.update(&mut cx, |_, cx| cx.notify());
968 Ok(())
969 })
970 }
971
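    /// Reverses `share`: clears collaborators and shared buffers, and downgrades buffers and
    /// invisible worktrees back to weak handles.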
972 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
973 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
974 if !*is_shared {
975 return;
976 }
977
978 *is_shared = false;
979 self.collaborators.clear();
980 self.shared_buffers.clear();
981 for worktree_handle in self.worktrees.iter_mut() {
982 if let WorktreeHandle::Strong(worktree) = worktree_handle {
983 let is_visible = worktree.update(cx, |worktree, _| {
984 worktree.as_local_mut().unwrap().unshare();
985 worktree.is_visible()
986 });
987 if !is_visible {
988 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
989 }
990 }
991 }
992
993 for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
1000 }
1001
1002 cx.notify();
1003 } else {
1004 log::error!("attempted to unshare a remote project");
1005 }
1006 }
1007
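    /// Replies to another user's request to join this project, ensuring the project is shared
    /// before the response is sent.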
1008 pub fn respond_to_join_request(
1009 &mut self,
1010 requester_id: u64,
1011 allow: bool,
1012 cx: &mut ModelContext<Self>,
1013 ) {
1014 if let Some(project_id) = self.remote_id() {
1015 let share = self.share(cx);
1016 let client = self.client.clone();
1017 cx.foreground()
1018 .spawn(async move {
1019 share.await?;
1020 client.send(proto::RespondToJoinProjectRequest {
1021 requester_id,
1022 project_id,
1023 allow,
1024 })
1025 })
1026 .detach_and_log_err(cx);
1027 }
1028 }
1029
1030 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1031 if let ProjectClientState::Remote {
1032 sharing_has_stopped,
1033 ..
1034 } = &mut self.client_state
1035 {
1036 *sharing_has_stopped = true;
1037 self.collaborators.clear();
1038 cx.notify();
1039 }
1040 }
1041
1042 pub fn is_read_only(&self) -> bool {
1043 match &self.client_state {
1044 ProjectClientState::Local { .. } => false,
1045 ProjectClientState::Remote {
1046 sharing_has_stopped,
1047 ..
1048 } => *sharing_has_stopped,
1049 }
1050 }
1051
1052 pub fn is_local(&self) -> bool {
1053 match &self.client_state {
1054 ProjectClientState::Local { .. } => true,
1055 ProjectClientState::Remote { .. } => false,
1056 }
1057 }
1058
1059 pub fn is_remote(&self) -> bool {
1060 !self.is_local()
1061 }
1062
1063 pub fn create_buffer(
1064 &mut self,
1065 text: &str,
1066 language: Option<Arc<Language>>,
1067 cx: &mut ModelContext<Self>,
1068 ) -> Result<ModelHandle<Buffer>> {
1069 if self.is_remote() {
1070 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1071 }
1072
1073 let buffer = cx.add_model(|cx| {
1074 Buffer::new(self.replica_id(), text, cx)
1075 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1076 });
1077 self.register_buffer(&buffer, cx)?;
1078 Ok(buffer)
1079 }
1080
1081 pub fn open_path(
1082 &mut self,
1083 path: impl Into<ProjectPath>,
1084 cx: &mut ModelContext<Self>,
1085 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1086 let task = self.open_buffer(path, cx);
1087 cx.spawn_weak(|_, cx| async move {
1088 let buffer = task.await?;
1089 let project_entry_id = buffer
1090 .read_with(&cx, |buffer, cx| {
1091 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1092 })
1093 .ok_or_else(|| anyhow!("no project entry"))?;
1094 Ok((project_entry_id, buffer.into()))
1095 })
1096 }
1097
1098 pub fn open_local_buffer(
1099 &mut self,
1100 abs_path: impl AsRef<Path>,
1101 cx: &mut ModelContext<Self>,
1102 ) -> Task<Result<ModelHandle<Buffer>>> {
1103 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1104 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1105 } else {
1106 Task::ready(Err(anyhow!("no such path")))
1107 }
1108 }
1109
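    /// Opens the buffer at the given project path, reusing an already-open buffer or an in-flight
    /// load of the same path when possible.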
1110 pub fn open_buffer(
1111 &mut self,
1112 path: impl Into<ProjectPath>,
1113 cx: &mut ModelContext<Self>,
1114 ) -> Task<Result<ModelHandle<Buffer>>> {
1115 let project_path = path.into();
1116 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1117 worktree
1118 } else {
1119 return Task::ready(Err(anyhow!("no such worktree")));
1120 };
1121
1122 // If there is already a buffer for the given path, then return it.
1123 let existing_buffer = self.get_open_buffer(&project_path, cx);
1124 if let Some(existing_buffer) = existing_buffer {
1125 return Task::ready(Ok(existing_buffer));
1126 }
1127
1128 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1129 // If the given path is already being loaded, then wait for that existing
1130 // task to complete and return the same buffer.
1131 hash_map::Entry::Occupied(e) => e.get().clone(),
1132
1133 // Otherwise, record the fact that this path is now being loaded.
1134 hash_map::Entry::Vacant(entry) => {
1135 let (mut tx, rx) = postage::watch::channel();
1136 entry.insert(rx.clone());
1137
1138 let load_buffer = if worktree.read(cx).is_local() {
1139 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1140 } else {
1141 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1142 };
1143
1144 cx.spawn(move |this, mut cx| async move {
1145 let load_result = load_buffer.await;
1146 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1147 // Record the fact that the buffer is no longer loading.
1148 this.loading_buffers.remove(&project_path);
1149 let buffer = load_result.map_err(Arc::new)?;
1150 Ok(buffer)
1151 }));
1152 })
1153 .detach();
1154 rx
1155 }
1156 };
1157
1158 cx.foreground().spawn(async move {
1159 loop {
1160 if let Some(result) = loading_watch.borrow().as_ref() {
1161 match result {
1162 Ok(buffer) => return Ok(buffer.clone()),
1163 Err(error) => return Err(anyhow!("{}", error)),
1164 }
1165 }
1166 loading_watch.next().await;
1167 }
1168 })
1169 }
1170
1171 fn open_local_buffer_internal(
1172 &mut self,
1173 path: &Arc<Path>,
1174 worktree: &ModelHandle<Worktree>,
1175 cx: &mut ModelContext<Self>,
1176 ) -> Task<Result<ModelHandle<Buffer>>> {
1177 let load_buffer = worktree.update(cx, |worktree, cx| {
1178 let worktree = worktree.as_local_mut().unwrap();
1179 worktree.load_buffer(path, cx)
1180 });
1181 cx.spawn(|this, mut cx| async move {
1182 let buffer = load_buffer.await?;
1183 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1184 Ok(buffer)
1185 })
1186 }
1187
1188 fn open_remote_buffer_internal(
1189 &mut self,
1190 path: &Arc<Path>,
1191 worktree: &ModelHandle<Worktree>,
1192 cx: &mut ModelContext<Self>,
1193 ) -> Task<Result<ModelHandle<Buffer>>> {
1194 let rpc = self.client.clone();
1195 let project_id = self.remote_id().unwrap();
1196 let remote_worktree_id = worktree.read(cx).id();
1197 let path = path.clone();
1198 let path_string = path.to_string_lossy().to_string();
1199 cx.spawn(|this, mut cx| async move {
1200 let response = rpc
1201 .request(proto::OpenBufferByPath {
1202 project_id,
1203 worktree_id: remote_worktree_id.to_proto(),
1204 path: path_string,
1205 })
1206 .await?;
1207 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1208 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1209 .await
1210 })
1211 }
1212
1213 fn open_local_buffer_via_lsp(
1214 &mut self,
1215 abs_path: lsp::Url,
1216 lsp_adapter: Arc<dyn LspAdapter>,
1217 lsp_server: Arc<LanguageServer>,
1218 cx: &mut ModelContext<Self>,
1219 ) -> Task<Result<ModelHandle<Buffer>>> {
1220 cx.spawn(|this, mut cx| async move {
1221 let abs_path = abs_path
1222 .to_file_path()
1223 .map_err(|_| anyhow!("can't convert URI to path"))?;
1224 let (worktree, relative_path) = if let Some(result) =
1225 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1226 {
1227 result
1228 } else {
1229 let worktree = this
1230 .update(&mut cx, |this, cx| {
1231 this.create_local_worktree(&abs_path, false, cx)
1232 })
1233 .await?;
1234 this.update(&mut cx, |this, cx| {
1235 this.language_servers.insert(
1236 (worktree.read(cx).id(), lsp_adapter.name()),
1237 (lsp_adapter, lsp_server),
1238 );
1239 });
1240 (worktree, PathBuf::new())
1241 };
1242
1243 let project_path = ProjectPath {
1244 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1245 path: relative_path.into(),
1246 };
1247 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1248 .await
1249 })
1250 }
1251
1252 pub fn open_buffer_by_id(
1253 &mut self,
1254 id: u64,
1255 cx: &mut ModelContext<Self>,
1256 ) -> Task<Result<ModelHandle<Buffer>>> {
1257 if let Some(buffer) = self.buffer_for_id(id, cx) {
1258 Task::ready(Ok(buffer))
1259 } else if self.is_local() {
1260 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1261 } else if let Some(project_id) = self.remote_id() {
1262 let request = self
1263 .client
1264 .request(proto::OpenBufferById { project_id, id });
1265 cx.spawn(|this, mut cx| async move {
1266 let buffer = request
1267 .await?
1268 .buffer
1269 .ok_or_else(|| anyhow!("invalid buffer"))?;
1270 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1271 .await
1272 })
1273 } else {
1274 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1275 }
1276 }
1277
1278 pub fn save_buffer_as(
1279 &mut self,
1280 buffer: ModelHandle<Buffer>,
1281 abs_path: PathBuf,
1282 cx: &mut ModelContext<Project>,
1283 ) -> Task<Result<()>> {
1284 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1285 let old_path =
1286 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1287 cx.spawn(|this, mut cx| async move {
1288 if let Some(old_path) = old_path {
1289 this.update(&mut cx, |this, cx| {
1290 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1291 });
1292 }
1293 let (worktree, path) = worktree_task.await?;
1294 worktree
1295 .update(&mut cx, |worktree, cx| {
1296 worktree
1297 .as_local_mut()
1298 .unwrap()
1299 .save_buffer_as(buffer.clone(), path, cx)
1300 })
1301 .await?;
1302 this.update(&mut cx, |this, cx| {
1303 this.assign_language_to_buffer(&buffer, cx);
1304 this.register_buffer_with_language_server(&buffer, cx);
1305 });
1306 Ok(())
1307 })
1308 }
1309
1310 pub fn get_open_buffer(
1311 &mut self,
1312 path: &ProjectPath,
1313 cx: &mut ModelContext<Self>,
1314 ) -> Option<ModelHandle<Buffer>> {
1315 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1316 self.opened_buffers.values().find_map(|buffer| {
1317 let buffer = buffer.upgrade(cx)?;
1318 let file = File::from_dyn(buffer.read(cx).file())?;
1319 if file.worktree == worktree && file.path() == &path.path {
1320 Some(buffer)
1321 } else {
1322 None
1323 }
1324 })
1325 }
1326
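    /// Tracks a newly opened buffer: applies any operations that arrived while it was loading,
    /// subscribes to its events, and assigns its language and language server.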
1327 fn register_buffer(
1328 &mut self,
1329 buffer: &ModelHandle<Buffer>,
1330 cx: &mut ModelContext<Self>,
1331 ) -> Result<()> {
1332 let remote_id = buffer.read(cx).remote_id();
1333 let open_buffer = if self.is_remote() || self.is_shared() {
1334 OpenBuffer::Strong(buffer.clone())
1335 } else {
1336 OpenBuffer::Weak(buffer.downgrade())
1337 };
1338
1339 match self.opened_buffers.insert(remote_id, open_buffer) {
1340 None => {}
1341 Some(OpenBuffer::Loading(operations)) => {
1342 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1343 }
1344 Some(OpenBuffer::Weak(existing_handle)) => {
1345 if existing_handle.upgrade(cx).is_some() {
1346 Err(anyhow!(
1347 "already registered buffer with remote id {}",
1348 remote_id
1349 ))?
1350 }
1351 }
1352 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1353 "already registered buffer with remote id {}",
1354 remote_id
1355 ))?,
1356 }
1357 cx.subscribe(buffer, |this, buffer, event, cx| {
1358 this.on_buffer_event(buffer, event, cx);
1359 })
1360 .detach();
1361
1362 self.assign_language_to_buffer(buffer, cx);
1363 self.register_buffer_with_language_server(buffer, cx);
1364 cx.observe_release(buffer, |this, buffer, cx| {
1365 if let Some(file) = File::from_dyn(buffer.file()) {
1366 if file.is_local() {
1367 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1368 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1369 server
1370 .notify::<lsp::notification::DidCloseTextDocument>(
1371 lsp::DidCloseTextDocumentParams {
1372 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1373 },
1374 )
1375 .log_err();
1376 }
1377 }
1378 }
1379 })
1380 .detach();
1381
1382 Ok(())
1383 }
1384
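    /// Hooks a locally-backed buffer up to language support: seeds diagnostics already stored on
    /// its worktree and, if a language server is running for its language, sends
    /// `textDocument/didOpen` and records the snapshot used to compute incremental changes.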
1385 fn register_buffer_with_language_server(
1386 &mut self,
1387 buffer_handle: &ModelHandle<Buffer>,
1388 cx: &mut ModelContext<Self>,
1389 ) {
1390 let buffer = buffer_handle.read(cx);
1391 let buffer_id = buffer.remote_id();
1392 if let Some(file) = File::from_dyn(buffer.file()) {
1393 if file.is_local() {
1394 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1395 let initial_snapshot = buffer.text_snapshot();
1396
1397 let mut language_server = None;
1398 let mut language_id = None;
1399 if let Some(language) = buffer.language() {
1400 let worktree_id = file.worktree_id(cx);
1401 if let Some(adapter) = language.lsp_adapter() {
1402 language_id = adapter.id_for_language(language.name().as_ref());
1403 language_server = self
1404 .language_servers
1405 .get(&(worktree_id, adapter.name()))
1406 .cloned();
1407 }
1408 }
1409
1410 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1411 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1412 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1413 .log_err();
1414 }
1415 }
1416
1417 if let Some((_, server)) = language_server {
1418 server
1419 .notify::<lsp::notification::DidOpenTextDocument>(
1420 lsp::DidOpenTextDocumentParams {
1421 text_document: lsp::TextDocumentItem::new(
1422 uri,
1423 language_id.unwrap_or_default(),
1424 0,
1425 initial_snapshot.text(),
1426 ),
1427 }
1428 .clone(),
1429 )
1430 .log_err();
1431 buffer_handle.update(cx, |buffer, cx| {
1432 buffer.set_completion_triggers(
1433 server
1434 .capabilities()
1435 .completion_provider
1436 .as_ref()
1437 .and_then(|provider| provider.trigger_characters.clone())
1438 .unwrap_or(Vec::new()),
1439 cx,
1440 )
1441 });
1442 self.buffer_snapshots
1443 .insert(buffer_id, vec![(0, initial_snapshot)]);
1444 }
1445 }
1446 }
1447 }
1448
1449 fn unregister_buffer_from_language_server(
1450 &mut self,
1451 buffer: &ModelHandle<Buffer>,
1452 old_path: PathBuf,
1453 cx: &mut ModelContext<Self>,
1454 ) {
1455 buffer.update(cx, |buffer, cx| {
1456 buffer.update_diagnostics(Default::default(), cx);
1457 self.buffer_snapshots.remove(&buffer.remote_id());
1458 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1459 language_server
1460 .notify::<lsp::notification::DidCloseTextDocument>(
1461 lsp::DidCloseTextDocumentParams {
1462 text_document: lsp::TextDocumentIdentifier::new(
1463 lsp::Url::from_file_path(old_path).unwrap(),
1464 ),
1465 },
1466 )
1467 .log_err();
1468 }
1469 });
1470 }
1471
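    /// Reacts to buffer events: broadcasts operations to collaborators, and forwards edits and
    /// saves to the buffer's language server as `didChange` and `didSave` notifications.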
1472 fn on_buffer_event(
1473 &mut self,
1474 buffer: ModelHandle<Buffer>,
1475 event: &BufferEvent,
1476 cx: &mut ModelContext<Self>,
1477 ) -> Option<()> {
1478 match event {
1479 BufferEvent::Operation(operation) => {
1480 let project_id = self.remote_id()?;
1481 let request = self.client.request(proto::UpdateBuffer {
1482 project_id,
1483 buffer_id: buffer.read(cx).remote_id(),
1484 operations: vec![language::proto::serialize_operation(&operation)],
1485 });
1486 cx.background().spawn(request).detach_and_log_err(cx);
1487 }
1488 BufferEvent::Edited { .. } => {
1489 let (_, language_server) = self
1490 .language_server_for_buffer(buffer.read(cx), cx)?
1491 .clone();
1492 let buffer = buffer.read(cx);
1493 let file = File::from_dyn(buffer.file())?;
1494 let abs_path = file.as_local()?.abs_path(cx);
1495 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1496 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1497 let (version, prev_snapshot) = buffer_snapshots.last()?;
1498 let next_snapshot = buffer.text_snapshot();
1499 let next_version = version + 1;
1500
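                // Describe the edits made since the last snapshot we sent to the language server
                // as LSP incremental content changes.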
1501 let content_changes = buffer
1502 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1503 .map(|edit| {
1504 let edit_start = edit.new.start.0;
1505 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1506 let new_text = next_snapshot
1507 .text_for_range(edit.new.start.1..edit.new.end.1)
1508 .collect();
1509 lsp::TextDocumentContentChangeEvent {
1510 range: Some(lsp::Range::new(
1511 point_to_lsp(edit_start),
1512 point_to_lsp(edit_end),
1513 )),
1514 range_length: None,
1515 text: new_text,
1516 }
1517 })
1518 .collect();
1519
1520 buffer_snapshots.push((next_version, next_snapshot));
1521
1522 language_server
1523 .notify::<lsp::notification::DidChangeTextDocument>(
1524 lsp::DidChangeTextDocumentParams {
1525 text_document: lsp::VersionedTextDocumentIdentifier::new(
1526 uri,
1527 next_version,
1528 ),
1529 content_changes,
1530 },
1531 )
1532 .log_err();
1533 }
1534 BufferEvent::Saved => {
1535 let file = File::from_dyn(buffer.read(cx).file())?;
1536 let worktree_id = file.worktree_id(cx);
1537 let abs_path = file.as_local()?.abs_path(cx);
1538 let text_document = lsp::TextDocumentIdentifier {
1539 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1540 };
1541
1542 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1543 server
1544 .notify::<lsp::notification::DidSaveTextDocument>(
1545 lsp::DidSaveTextDocumentParams {
1546 text_document: text_document.clone(),
1547 text: None,
1548 },
1549 )
1550 .log_err();
1551 }
1552 }
1553 _ => {}
1554 }
1555
1556 None
1557 }
1558
1559 fn language_servers_for_worktree(
1560 &self,
1561 worktree_id: WorktreeId,
1562 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1563 self.language_servers.iter().filter_map(
1564 move |((language_server_worktree_id, _), server)| {
1565 if *language_server_worktree_id == worktree_id {
1566 Some(server)
1567 } else {
1568 None
1569 }
1570 },
1571 )
1572 }
1573
1574 fn assign_language_to_buffer(
1575 &mut self,
1576 buffer: &ModelHandle<Buffer>,
1577 cx: &mut ModelContext<Self>,
1578 ) -> Option<()> {
1579 // If the buffer has a language, set it and start the language server if we haven't already.
1580 let full_path = buffer.read(cx).file()?.full_path(cx);
1581 let language = self.languages.select_language(&full_path)?;
1582 buffer.update(cx, |buffer, cx| {
1583 buffer.set_language(Some(language.clone()), cx);
1584 });
1585
1586 let file = File::from_dyn(buffer.read(cx).file())?;
1587 let worktree = file.worktree.read(cx).as_local()?;
1588 let worktree_id = worktree.id();
1589 let worktree_abs_path = worktree.abs_path().clone();
1590 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1591
1592 None
1593 }
1594
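    /// Starts a language server for the given worktree and language if one isn't already running,
    /// wiring up its diagnostics, configuration, workspace-edit, and progress handlers, and then
    /// opening any matching buffers that are already loaded.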
1595 fn start_language_server(
1596 &mut self,
1597 worktree_id: WorktreeId,
1598 worktree_path: Arc<Path>,
1599 language: Arc<Language>,
1600 cx: &mut ModelContext<Self>,
1601 ) {
1602 let adapter = if let Some(adapter) = language.lsp_adapter() {
1603 adapter
1604 } else {
1605 return;
1606 };
1607 let key = (worktree_id, adapter.name());
1608 self.started_language_servers
1609 .entry(key.clone())
1610 .or_insert_with(|| {
1611 let server_id = post_inc(&mut self.next_language_server_id);
1612 let language_server = self.languages.start_language_server(
1613 server_id,
1614 language.clone(),
1615 worktree_path,
1616 self.client.http_client(),
1617 cx,
1618 );
1619 cx.spawn_weak(|this, mut cx| async move {
1620 let language_server = language_server?.await.log_err()?;
1621 let language_server = language_server
1622 .initialize(adapter.initialization_options())
1623 .await
1624 .log_err()?;
1625 let this = this.upgrade(&cx)?;
1626 let disk_based_diagnostics_progress_token =
1627 adapter.disk_based_diagnostics_progress_token();
1628
1629 language_server
1630 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1631 let this = this.downgrade();
1632 let adapter = adapter.clone();
1633 move |params, mut cx| {
1634 if let Some(this) = this.upgrade(&cx) {
1635 this.update(&mut cx, |this, cx| {
1636 this.on_lsp_diagnostics_published(
1637 server_id,
1638 params,
1639 &adapter,
1640 disk_based_diagnostics_progress_token,
1641 cx,
1642 );
1643 });
1644 }
1645 }
1646 })
1647 .detach();
1648
1649 language_server
1650 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1651 let settings = this
1652 .read_with(&cx, |this, _| this.language_server_settings.clone());
1653 move |params, _| {
1654 let settings = settings.lock().clone();
1655 async move {
1656 Ok(params
1657 .items
1658 .into_iter()
1659 .map(|item| {
1660 if let Some(section) = &item.section {
1661 settings
1662 .get(section)
1663 .cloned()
1664 .unwrap_or(serde_json::Value::Null)
1665 } else {
1666 settings.clone()
1667 }
1668 })
1669 .collect())
1670 }
1671 }
1672 })
1673 .detach();
1674
1675 language_server
1676 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1677 let this = this.downgrade();
1678 let adapter = adapter.clone();
1679 let language_server = language_server.clone();
1680 move |params, cx| {
1681 Self::on_lsp_workspace_edit(
1682 this,
1683 params,
1684 server_id,
1685 adapter.clone(),
1686 language_server.clone(),
1687 cx,
1688 )
1689 }
1690 })
1691 .detach();
1692
1693 language_server
1694 .on_notification::<lsp::notification::Progress, _>({
1695 let this = this.downgrade();
1696 move |params, mut cx| {
1697 if let Some(this) = this.upgrade(&cx) {
1698 this.update(&mut cx, |this, cx| {
1699 this.on_lsp_progress(
1700 params,
1701 server_id,
1702 disk_based_diagnostics_progress_token,
1703 cx,
1704 );
1705 });
1706 }
1707 }
1708 })
1709 .detach();
1710
1711 this.update(&mut cx, |this, cx| {
1712 this.language_servers
1713 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1714 this.language_server_statuses.insert(
1715 server_id,
1716 LanguageServerStatus {
1717 name: language_server.name().to_string(),
1718 pending_work: Default::default(),
1719 pending_diagnostic_updates: 0,
1720 },
1721 );
1722 language_server
1723 .notify::<lsp::notification::DidChangeConfiguration>(
1724 lsp::DidChangeConfigurationParams {
1725 settings: this.language_server_settings.lock().clone(),
1726 },
1727 )
1728 .ok();
1729
1730 if let Some(project_id) = this.remote_id() {
1731 this.client
1732 .send(proto::StartLanguageServer {
1733 project_id,
1734 server: Some(proto::LanguageServer {
1735 id: server_id as u64,
1736 name: language_server.name().to_string(),
1737 }),
1738 })
1739 .log_err();
1740 }
1741
1742 // Tell the language server about every open buffer in the worktree that matches the language.
1743 for buffer in this.opened_buffers.values() {
1744 if let Some(buffer_handle) = buffer.upgrade(cx) {
1745 let buffer = buffer_handle.read(cx);
1746 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1747 file
1748 } else {
1749 continue;
1750 };
1751 let language = if let Some(language) = buffer.language() {
1752 language
1753 } else {
1754 continue;
1755 };
1756 if file.worktree.read(cx).id() != key.0
1757 || language.lsp_adapter().map(|a| a.name())
1758 != Some(key.1.clone())
1759 {
1760 continue;
1761 }
1762
1763 let file = file.as_local()?;
1764 let versions = this
1765 .buffer_snapshots
1766 .entry(buffer.remote_id())
1767 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1768 let (version, initial_snapshot) = versions.last().unwrap();
1769 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1770 let language_id = adapter.id_for_language(language.name().as_ref());
1771 language_server
1772 .notify::<lsp::notification::DidOpenTextDocument>(
1773 lsp::DidOpenTextDocumentParams {
1774 text_document: lsp::TextDocumentItem::new(
1775 uri,
1776 language_id.unwrap_or_default(),
1777 *version,
1778 initial_snapshot.text(),
1779 ),
1780 },
1781 )
1782 .log_err()?;
1783 buffer_handle.update(cx, |buffer, cx| {
1784 buffer.set_completion_triggers(
1785 language_server
1786 .capabilities()
1787 .completion_provider
1788 .as_ref()
1789 .and_then(|provider| {
1790 provider.trigger_characters.clone()
1791 })
1792 .unwrap_or(Vec::new()),
1793 cx,
1794 )
1795 });
1796 }
1797 }
1798
1799 cx.notify();
1800 Some(())
1801 });
1802
1803 Some(language_server)
1804 })
1805 });
1806 }
1807
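    /// Restarts the language servers backing the given buffers, grouped by worktree and language.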
1808 pub fn restart_language_servers_for_buffers(
1809 &mut self,
1810 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1811 cx: &mut ModelContext<Self>,
1812 ) -> Option<()> {
1813 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1814 .into_iter()
1815 .filter_map(|buffer| {
1816 let file = File::from_dyn(buffer.read(cx).file())?;
1817 let worktree = file.worktree.read(cx).as_local()?;
1818 let worktree_id = worktree.id();
1819 let worktree_abs_path = worktree.abs_path().clone();
1820 let full_path = file.full_path(cx);
1821 Some((worktree_id, worktree_abs_path, full_path))
1822 })
1823 .collect();
1824 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1825 let language = self.languages.select_language(&full_path)?;
1826 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1827 }
1828
1829 None
1830 }
1831
1832 fn restart_language_server(
1833 &mut self,
1834 worktree_id: WorktreeId,
1835 worktree_path: Arc<Path>,
1836 language: Arc<Language>,
1837 cx: &mut ModelContext<Self>,
1838 ) {
1839 let adapter = if let Some(adapter) = language.lsp_adapter() {
1840 adapter
1841 } else {
1842 return;
1843 };
1844 let key = (worktree_id, adapter.name());
1845 let server_to_shutdown = self.language_servers.remove(&key);
1846 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1850 cx.spawn_weak(|this, mut cx| async move {
1851 if let Some(this) = this.upgrade(&cx) {
1852 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1853 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1854 shutdown_task.await;
1855 }
1856 }
1857
1858 this.update(&mut cx, |this, cx| {
1859 this.start_language_server(worktree_id, worktree_path, language, cx);
1860 });
1861 }
1862 })
1863 .detach();
1864 }
1865
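    /// Handles `textDocument/publishDiagnostics`: lets the adapter post-process the diagnostics
    /// and, for servers without a disk-based-diagnostics progress token, brackets the update with
    /// started/finished events.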
1866 fn on_lsp_diagnostics_published(
1867 &mut self,
1868 server_id: usize,
1869 mut params: lsp::PublishDiagnosticsParams,
1870 adapter: &Arc<dyn LspAdapter>,
1871 disk_based_diagnostics_progress_token: Option<&str>,
1872 cx: &mut ModelContext<Self>,
1873 ) {
1874 adapter.process_diagnostics(&mut params);
1875 if disk_based_diagnostics_progress_token.is_none() {
1876 self.disk_based_diagnostics_started(cx);
1877 self.broadcast_language_server_update(
1878 server_id,
1879 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1880 proto::LspDiskBasedDiagnosticsUpdating {},
1881 ),
1882 );
1883 }
1884 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1885 .log_err();
1886 if disk_based_diagnostics_progress_token.is_none() {
1887 self.disk_based_diagnostics_finished(cx);
1888 self.broadcast_language_server_update(
1889 server_id,
1890 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1891 proto::LspDiskBasedDiagnosticsUpdated {},
1892 ),
1893 );
1894 }
1895 }
1896
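    /// Handles `$/progress` notifications, treating the adapter's disk-based-diagnostics token
    /// specially and relaying other work progress to collaborators.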
1897 fn on_lsp_progress(
1898 &mut self,
1899 progress: lsp::ProgressParams,
1900 server_id: usize,
1901 disk_based_diagnostics_progress_token: Option<&str>,
1902 cx: &mut ModelContext<Self>,
1903 ) {
1904 let token = match progress.token {
1905 lsp::NumberOrString::String(token) => token,
1906 lsp::NumberOrString::Number(token) => {
1907 log::info!("skipping numeric progress token {}", token);
1908 return;
1909 }
1910 };
1911 let progress = match progress.value {
1912 lsp::ProgressParamsValue::WorkDone(value) => value,
1913 };
1914 let language_server_status =
1915 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1916 status
1917 } else {
1918 return;
1919 };
1920 match progress {
1921 lsp::WorkDoneProgress::Begin(_) => {
1922 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1923 language_server_status.pending_diagnostic_updates += 1;
1924 if language_server_status.pending_diagnostic_updates == 1 {
1925 self.disk_based_diagnostics_started(cx);
1926 self.broadcast_language_server_update(
1927 server_id,
1928 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1929 proto::LspDiskBasedDiagnosticsUpdating {},
1930 ),
1931 );
1932 }
1933 } else {
1934 self.on_lsp_work_start(server_id, token.clone(), cx);
1935 self.broadcast_language_server_update(
1936 server_id,
1937 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1938 token,
1939 }),
1940 );
1941 }
1942 }
1943 lsp::WorkDoneProgress::Report(report) => {
1944 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1945 self.on_lsp_work_progress(
1946 server_id,
1947 token.clone(),
1948 LanguageServerProgress {
1949 message: report.message.clone(),
1950 percentage: report.percentage.map(|p| p as usize),
1951 last_update_at: Instant::now(),
1952 },
1953 cx,
1954 );
1955 self.broadcast_language_server_update(
1956 server_id,
1957 proto::update_language_server::Variant::WorkProgress(
1958 proto::LspWorkProgress {
1959 token,
1960 message: report.message,
1961 percentage: report.percentage.map(|p| p as u32),
1962 },
1963 ),
1964 );
1965 }
1966 }
1967 lsp::WorkDoneProgress::End(_) => {
1968 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1969 language_server_status.pending_diagnostic_updates -= 1;
1970 if language_server_status.pending_diagnostic_updates == 0 {
1971 self.disk_based_diagnostics_finished(cx);
1972 self.broadcast_language_server_update(
1973 server_id,
1974 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1975 proto::LspDiskBasedDiagnosticsUpdated {},
1976 ),
1977 );
1978 }
1979 } else {
1980 self.on_lsp_work_end(server_id, token.clone(), cx);
1981 self.broadcast_language_server_update(
1982 server_id,
1983 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1984 token,
1985 }),
1986 );
1987 }
1988 }
1989 }
1990 }
1991
1992 fn on_lsp_work_start(
1993 &mut self,
1994 language_server_id: usize,
1995 token: String,
1996 cx: &mut ModelContext<Self>,
1997 ) {
1998 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1999 status.pending_work.insert(
2000 token,
2001 LanguageServerProgress {
2002 message: None,
2003 percentage: None,
2004 last_update_at: Instant::now(),
2005 },
2006 );
2007 cx.notify();
2008 }
2009 }
2010
2011 fn on_lsp_work_progress(
2012 &mut self,
2013 language_server_id: usize,
2014 token: String,
2015 progress: LanguageServerProgress,
2016 cx: &mut ModelContext<Self>,
2017 ) {
2018 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2019 status.pending_work.insert(token, progress);
2020 cx.notify();
2021 }
2022 }
2023
2024 fn on_lsp_work_end(
2025 &mut self,
2026 language_server_id: usize,
2027 token: String,
2028 cx: &mut ModelContext<Self>,
2029 ) {
2030 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2031 status.pending_work.remove(&token);
2032 cx.notify();
2033 }
2034 }
2035
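    /// Handles a `workspace/applyEdit` request from a language server: applies the edit
    /// to the project's buffers and records the resulting transaction so that a
    /// subsequently executed command can report it as its workspace edit.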
2036 async fn on_lsp_workspace_edit(
2037 this: WeakModelHandle<Self>,
2038 params: lsp::ApplyWorkspaceEditParams,
2039 server_id: usize,
2040 adapter: Arc<dyn LspAdapter>,
2041 language_server: Arc<LanguageServer>,
2042 mut cx: AsyncAppContext,
2043 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2044 let this = this
2045 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2047 let transaction = Self::deserialize_workspace_edit(
2048 this.clone(),
2049 params.edit,
2050 true,
2051 adapter.clone(),
2052 language_server.clone(),
2053 &mut cx,
2054 )
2055 .await
2056 .log_err();
2057 this.update(&mut cx, |this, _| {
2058 if let Some(transaction) = transaction {
2059 this.last_workspace_edits_by_language_server
2060 .insert(server_id, transaction);
2061 }
2062 });
2063 Ok(lsp::ApplyWorkspaceEditResponse {
2064 applied: true,
2065 failed_change: None,
2066 failure_reason: None,
2067 })
2068 }
2069
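    /// Forwards a language server status event to collaborators when this project has a
    /// remote id.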
2070 fn broadcast_language_server_update(
2071 &self,
2072 language_server_id: usize,
2073 event: proto::update_language_server::Variant,
2074 ) {
2075 if let Some(project_id) = self.remote_id() {
2076 self.client
2077 .send(proto::UpdateLanguageServer {
2078 project_id,
2079 language_server_id: language_server_id as u64,
2080 variant: Some(event),
2081 })
2082 .log_err();
2083 }
2084 }
2085
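    /// Sends the given settings to every running language server via
    /// `workspace/didChangeConfiguration` and stores them so that servers started later
    /// receive the same configuration.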
2086 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2087 for (_, server) in self.language_servers.values() {
2088 server
2089 .notify::<lsp::notification::DidChangeConfiguration>(
2090 lsp::DidChangeConfigurationParams {
2091 settings: settings.clone(),
2092 },
2093 )
2094 .ok();
2095 }
2096 *self.language_server_settings.lock() = settings;
2097 }
2098
2099 pub fn language_server_statuses(
2100 &self,
2101 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2102 self.language_server_statuses.values()
2103 }
2104
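    /// Converts an LSP `publishDiagnostics` payload into the project's diagnostic
    /// entries. Each primary diagnostic gets a fresh group id; related-information
    /// entries join the same group as non-primary diagnostics, and the severity and
    /// "unnecessary" flags reported for supporting diagnostics are folded back into the
    /// matching group members before the entries are stored for the file's path.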
2105 pub fn update_diagnostics(
2106 &mut self,
2107 params: lsp::PublishDiagnosticsParams,
2108 disk_based_sources: &[&str],
2109 cx: &mut ModelContext<Self>,
2110 ) -> Result<()> {
2111 let abs_path = params
2112 .uri
2113 .to_file_path()
2114 .map_err(|_| anyhow!("URI is not a file"))?;
2115 let mut next_group_id = 0;
2116 let mut diagnostics = Vec::default();
2117 let mut primary_diagnostic_group_ids = HashMap::default();
2118 let mut sources_by_group_id = HashMap::default();
2119 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2121 let source = diagnostic.source.as_ref();
2122 let code = diagnostic.code.as_ref().map(|code| match code {
2123 lsp::NumberOrString::Number(code) => code.to_string(),
2124 lsp::NumberOrString::String(code) => code.clone(),
2125 });
2126 let range = range_from_lsp(diagnostic.range);
2127 let is_supporting = diagnostic
2128 .related_information
2129 .as_ref()
2130 .map_or(false, |infos| {
2131 infos.iter().any(|info| {
2132 primary_diagnostic_group_ids.contains_key(&(
2133 source,
2134 code.clone(),
2135 range_from_lsp(info.location.range),
2136 ))
2137 })
2138 });
2139
2140 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2141 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2142 });
2143
2144 if is_supporting {
2145 supporting_diagnostics.insert(
2146 (source, code.clone(), range),
2147 (diagnostic.severity, is_unnecessary),
2148 );
2149 } else {
2150 let group_id = post_inc(&mut next_group_id);
2151 let is_disk_based = source.map_or(false, |source| {
2152 disk_based_sources.contains(&source.as_str())
2153 });
2154
2155 sources_by_group_id.insert(group_id, source);
2156 primary_diagnostic_group_ids
2157 .insert((source, code.clone(), range.clone()), group_id);
2158
2159 diagnostics.push(DiagnosticEntry {
2160 range,
2161 diagnostic: Diagnostic {
2162 code: code.clone(),
2163 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2164 message: diagnostic.message.clone(),
2165 group_id,
2166 is_primary: true,
2167 is_valid: true,
2168 is_disk_based,
2169 is_unnecessary,
2170 },
2171 });
2172 if let Some(infos) = &diagnostic.related_information {
2173 for info in infos {
2174 if info.location.uri == params.uri && !info.message.is_empty() {
2175 let range = range_from_lsp(info.location.range);
2176 diagnostics.push(DiagnosticEntry {
2177 range,
2178 diagnostic: Diagnostic {
2179 code: code.clone(),
2180 severity: DiagnosticSeverity::INFORMATION,
2181 message: info.message.clone(),
2182 group_id,
2183 is_primary: false,
2184 is_valid: true,
2185 is_disk_based,
2186 is_unnecessary: false,
2187 },
2188 });
2189 }
2190 }
2191 }
2192 }
2193 }
2194
2195 for entry in &mut diagnostics {
2196 let diagnostic = &mut entry.diagnostic;
2197 if !diagnostic.is_primary {
2198 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2199 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2200 source,
2201 diagnostic.code.clone(),
2202 entry.range.clone(),
2203 )) {
2204 if let Some(severity) = severity {
2205 diagnostic.severity = severity;
2206 }
2207 diagnostic.is_unnecessary = is_unnecessary;
2208 }
2209 }
2210 }
2211
2212 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2213 Ok(())
2214 }
2215
2216 pub fn update_diagnostic_entries(
2217 &mut self,
2218 abs_path: PathBuf,
2219 version: Option<i32>,
2220 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2221 cx: &mut ModelContext<Project>,
2222 ) -> Result<(), anyhow::Error> {
2223 let (worktree, relative_path) = self
2224 .find_local_worktree(&abs_path, cx)
2225 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2226 if !worktree.read(cx).is_visible() {
2227 return Ok(());
2228 }
2229
2230 let project_path = ProjectPath {
2231 worktree_id: worktree.read(cx).id(),
2232 path: relative_path.into(),
2233 };
2234 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2235 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2236 }
2237
2238 let updated = worktree.update(cx, |worktree, cx| {
2239 worktree
2240 .as_local_mut()
2241 .ok_or_else(|| anyhow!("not a local worktree"))?
2242 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2243 })?;
2244 if updated {
2245 cx.emit(Event::DiagnosticsUpdated(project_path));
2246 }
2247 Ok(())
2248 }
2249
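    /// Applies a set of diagnostics to an open buffer: entries are sorted, the ranges
    /// of disk-based diagnostics are mapped through unsaved edits, all ranges are
    /// clipped to the buffer snapshot (empty ranges are widened by one character), and
    /// the resulting `DiagnosticSet` is installed on the buffer.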
2250 fn update_buffer_diagnostics(
2251 &mut self,
2252 buffer: &ModelHandle<Buffer>,
2253 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2254 version: Option<i32>,
2255 cx: &mut ModelContext<Self>,
2256 ) -> Result<()> {
2257 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2258 Ordering::Equal
2259 .then_with(|| b.is_primary.cmp(&a.is_primary))
2260 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2261 .then_with(|| a.severity.cmp(&b.severity))
2262 .then_with(|| a.message.cmp(&b.message))
2263 }
2264
2265 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2266
2267 diagnostics.sort_unstable_by(|a, b| {
2268 Ordering::Equal
2269 .then_with(|| a.range.start.cmp(&b.range.start))
2270 .then_with(|| b.range.end.cmp(&a.range.end))
2271 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2272 });
2273
2274 let mut sanitized_diagnostics = Vec::new();
2275 let edits_since_save = Patch::new(
2276 snapshot
2277 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2278 .collect(),
2279 );
2280 for entry in diagnostics {
2281 let start;
2282 let end;
2283 if entry.diagnostic.is_disk_based {
2284 // Some diagnostics are based on files on disk instead of buffers'
2285 // current contents. Adjust these diagnostics' ranges to reflect
2286 // any unsaved edits.
2287 start = edits_since_save.old_to_new(entry.range.start);
2288 end = edits_since_save.old_to_new(entry.range.end);
2289 } else {
2290 start = entry.range.start;
2291 end = entry.range.end;
2292 }
2293
2294 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2295 ..snapshot.clip_point_utf16(end, Bias::Right);
2296
2297 // Expand empty ranges by one character
2298 if range.start == range.end {
2299 range.end.column += 1;
2300 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2301 if range.start == range.end && range.end.column > 0 {
2302 range.start.column -= 1;
2303 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2304 }
2305 }
2306
2307 sanitized_diagnostics.push(DiagnosticEntry {
2308 range,
2309 diagnostic: entry.diagnostic,
2310 });
2311 }
2312 drop(edits_since_save);
2313
2314 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2315 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2316 Ok(())
2317 }
2318
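    /// Reloads the given buffers from disk if they are dirty. Local buffers are
    /// reloaded directly, while buffers owned by a remote host are reloaded via RPC.
    /// The returned `ProjectTransaction` collects the resulting edits; transactions are
    /// forgotten from undo history unless `push_to_history` is true.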
2319 pub fn reload_buffers(
2320 &self,
2321 buffers: HashSet<ModelHandle<Buffer>>,
2322 push_to_history: bool,
2323 cx: &mut ModelContext<Self>,
2324 ) -> Task<Result<ProjectTransaction>> {
2325 let mut local_buffers = Vec::new();
2326 let mut remote_buffers = None;
2327 for buffer_handle in buffers {
2328 let buffer = buffer_handle.read(cx);
2329 if buffer.is_dirty() {
2330 if let Some(file) = File::from_dyn(buffer.file()) {
2331 if file.is_local() {
2332 local_buffers.push(buffer_handle);
2333 } else {
2334 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2335 }
2336 }
2337 }
2338 }
2339
2340 let remote_buffers = self.remote_id().zip(remote_buffers);
2341 let client = self.client.clone();
2342
2343 cx.spawn(|this, mut cx| async move {
2344 let mut project_transaction = ProjectTransaction::default();
2345
2346 if let Some((project_id, remote_buffers)) = remote_buffers {
2347 let response = client
2348 .request(proto::ReloadBuffers {
2349 project_id,
2350 buffer_ids: remote_buffers
2351 .iter()
2352 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2353 .collect(),
2354 })
2355 .await?
2356 .transaction
2357 .ok_or_else(|| anyhow!("missing transaction"))?;
2358 project_transaction = this
2359 .update(&mut cx, |this, cx| {
2360 this.deserialize_project_transaction(response, push_to_history, cx)
2361 })
2362 .await?;
2363 }
2364
2365 for buffer in local_buffers {
2366 let transaction = buffer
2367 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2368 .await?;
2369 buffer.update(&mut cx, |buffer, cx| {
2370 if let Some(transaction) = transaction {
2371 if !push_to_history {
2372 buffer.forget_transaction(transaction.id);
2373 }
2374 project_transaction.0.insert(cx.handle(), transaction);
2375 }
2376 });
2377 }
2378
2379 Ok(project_transaction)
2380 })
2381 }
2382
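    /// Formats the given buffers. Buffers backed by local files are formatted through
    /// their language server's document-formatting request (falling back to formatting
    /// the buffer's entire range), while remote buffers are formatted by the host over
    /// RPC. The returned `ProjectTransaction` collects the edits that were applied.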
2383 pub fn format(
2384 &self,
2385 buffers: HashSet<ModelHandle<Buffer>>,
2386 push_to_history: bool,
2387 cx: &mut ModelContext<Project>,
2388 ) -> Task<Result<ProjectTransaction>> {
2389 let mut local_buffers = Vec::new();
2390 let mut remote_buffers = None;
2391 for buffer_handle in buffers {
2392 let buffer = buffer_handle.read(cx);
2393 if let Some(file) = File::from_dyn(buffer.file()) {
2394 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2395 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2396 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2397 }
2398 } else {
2399 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2400 }
2401 } else {
2402 return Task::ready(Ok(Default::default()));
2403 }
2404 }
2405
2406 let remote_buffers = self.remote_id().zip(remote_buffers);
2407 let client = self.client.clone();
2408
2409 cx.spawn(|this, mut cx| async move {
2410 let mut project_transaction = ProjectTransaction::default();
2411
2412 if let Some((project_id, remote_buffers)) = remote_buffers {
2413 let response = client
2414 .request(proto::FormatBuffers {
2415 project_id,
2416 buffer_ids: remote_buffers
2417 .iter()
2418 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2419 .collect(),
2420 })
2421 .await?
2422 .transaction
2423 .ok_or_else(|| anyhow!("missing transaction"))?;
2424 project_transaction = this
2425 .update(&mut cx, |this, cx| {
2426 this.deserialize_project_transaction(response, push_to_history, cx)
2427 })
2428 .await?;
2429 }
2430
2431 for (buffer, buffer_abs_path, language_server) in local_buffers {
2432 let text_document = lsp::TextDocumentIdentifier::new(
2433 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2434 );
2435 let capabilities = &language_server.capabilities();
2436 let tab_size = cx.update(|cx| {
2437 let language_name = buffer.read(cx).language().map(|language| language.name());
2438 cx.global::<Settings>().tab_size(language_name.as_deref())
2439 });
2440 let lsp_edits = if capabilities
2441 .document_formatting_provider
2442 .as_ref()
2443 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2444 {
2445 language_server
2446 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2447 text_document,
2448 options: lsp::FormattingOptions {
2449 tab_size,
2450 insert_spaces: true,
2451 insert_final_newline: Some(true),
2452 ..Default::default()
2453 },
2454 work_done_progress_params: Default::default(),
2455 })
2456 .await?
2457 } else if capabilities
2458 .document_range_formatting_provider
2459 .as_ref()
2460 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2461 {
2462 let buffer_start = lsp::Position::new(0, 0);
2463 let buffer_end =
2464 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2465 language_server
2466 .request::<lsp::request::RangeFormatting>(
2467 lsp::DocumentRangeFormattingParams {
2468 text_document,
2469 range: lsp::Range::new(buffer_start, buffer_end),
2470 options: lsp::FormattingOptions {
                                    tab_size,
2472 insert_spaces: true,
2473 insert_final_newline: Some(true),
2474 ..Default::default()
2475 },
2476 work_done_progress_params: Default::default(),
2477 },
2478 )
2479 .await?
2480 } else {
2481 continue;
2482 };
2483
2484 if let Some(lsp_edits) = lsp_edits {
2485 let edits = this
2486 .update(&mut cx, |this, cx| {
2487 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2488 })
2489 .await?;
2490 buffer.update(&mut cx, |buffer, cx| {
2491 buffer.finalize_last_transaction();
2492 buffer.start_transaction();
2493 for (range, text) in edits {
2494 buffer.edit([(range, text)], cx);
2495 }
2496 if buffer.end_transaction(cx).is_some() {
2497 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2498 if !push_to_history {
2499 buffer.forget_transaction(transaction.id);
2500 }
2501 project_transaction.0.insert(cx.handle(), transaction);
2502 }
2503 });
2504 }
2505 }
2506
2507 Ok(project_transaction)
2508 })
2509 }
2510
2511 pub fn definition<T: ToPointUtf16>(
2512 &self,
2513 buffer: &ModelHandle<Buffer>,
2514 position: T,
2515 cx: &mut ModelContext<Self>,
2516 ) -> Task<Result<Vec<Location>>> {
2517 let position = position.to_point_utf16(buffer.read(cx));
2518 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2519 }
2520
2521 pub fn references<T: ToPointUtf16>(
2522 &self,
2523 buffer: &ModelHandle<Buffer>,
2524 position: T,
2525 cx: &mut ModelContext<Self>,
2526 ) -> Task<Result<Vec<Location>>> {
2527 let position = position.to_point_utf16(buffer.read(cx));
2528 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2529 }
2530
2531 pub fn document_highlights<T: ToPointUtf16>(
2532 &self,
2533 buffer: &ModelHandle<Buffer>,
2534 position: T,
2535 cx: &mut ModelContext<Self>,
2536 ) -> Task<Result<Vec<DocumentHighlight>>> {
2537 let position = position.to_point_utf16(buffer.read(cx));
2538
2539 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2540 }
2541
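    /// Queries workspace symbols matching `query`. For local projects, every running
    /// language server is asked via `workspace/symbol` and the results are resolved
    /// against local worktrees where possible; for remote projects the request is
    /// forwarded to the host over RPC.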
2542 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2543 if self.is_local() {
2544 let mut requests = Vec::new();
2545 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2546 let worktree_id = *worktree_id;
2547 if let Some(worktree) = self
2548 .worktree_for_id(worktree_id, cx)
2549 .and_then(|worktree| worktree.read(cx).as_local())
2550 {
2551 let lsp_adapter = lsp_adapter.clone();
2552 let worktree_abs_path = worktree.abs_path().clone();
2553 requests.push(
2554 language_server
2555 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2556 query: query.to_string(),
2557 ..Default::default()
2558 })
2559 .log_err()
2560 .map(move |response| {
2561 (
2562 lsp_adapter,
2563 worktree_id,
2564 worktree_abs_path,
2565 response.unwrap_or_default(),
2566 )
2567 }),
2568 );
2569 }
2570 }
2571
2572 cx.spawn_weak(|this, cx| async move {
2573 let responses = futures::future::join_all(requests).await;
2574 let this = if let Some(this) = this.upgrade(&cx) {
2575 this
2576 } else {
2577 return Ok(Default::default());
2578 };
2579 this.read_with(&cx, |this, cx| {
2580 let mut symbols = Vec::new();
2581 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2582 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2583 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2584 let mut worktree_id = source_worktree_id;
2585 let path;
2586 if let Some((worktree, rel_path)) =
2587 this.find_local_worktree(&abs_path, cx)
2588 {
2589 worktree_id = worktree.read(cx).id();
2590 path = rel_path;
2591 } else {
2592 path = relativize_path(&worktree_abs_path, &abs_path);
2593 }
2594
2595 let label = this
2596 .languages
2597 .select_language(&path)
2598 .and_then(|language| {
2599 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2600 })
2601 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2602 let signature = this.symbol_signature(worktree_id, &path);
2603
2604 Some(Symbol {
2605 source_worktree_id,
2606 worktree_id,
2607 language_server_name: adapter.name(),
2608 name: lsp_symbol.name,
2609 kind: lsp_symbol.kind,
2610 label,
2611 path,
2612 range: range_from_lsp(lsp_symbol.location.range),
2613 signature,
2614 })
2615 }));
2616 }
2617 Ok(symbols)
2618 })
2619 })
2620 } else if let Some(project_id) = self.remote_id() {
2621 let request = self.client.request(proto::GetProjectSymbols {
2622 project_id,
2623 query: query.to_string(),
2624 });
2625 cx.spawn_weak(|this, cx| async move {
2626 let response = request.await?;
2627 let mut symbols = Vec::new();
2628 if let Some(this) = this.upgrade(&cx) {
2629 this.read_with(&cx, |this, _| {
2630 symbols.extend(
2631 response
2632 .symbols
2633 .into_iter()
2634 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2635 );
2636 })
2637 }
2638 Ok(symbols)
2639 })
2640 } else {
2641 Task::ready(Ok(Default::default()))
2642 }
2643 }
2644
2645 pub fn open_buffer_for_symbol(
2646 &mut self,
2647 symbol: &Symbol,
2648 cx: &mut ModelContext<Self>,
2649 ) -> Task<Result<ModelHandle<Buffer>>> {
2650 if self.is_local() {
2651 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2652 symbol.source_worktree_id,
2653 symbol.language_server_name.clone(),
2654 )) {
2655 server.clone()
2656 } else {
2657 return Task::ready(Err(anyhow!(
2658 "language server for worktree and language not found"
2659 )));
2660 };
2661
2662 let worktree_abs_path = if let Some(worktree_abs_path) = self
2663 .worktree_for_id(symbol.worktree_id, cx)
2664 .and_then(|worktree| worktree.read(cx).as_local())
2665 .map(|local_worktree| local_worktree.abs_path())
2666 {
2667 worktree_abs_path
2668 } else {
2669 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2670 };
2671 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2672 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2673 uri
2674 } else {
2675 return Task::ready(Err(anyhow!("invalid symbol path")));
2676 };
2677
2678 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2679 } else if let Some(project_id) = self.remote_id() {
2680 let request = self.client.request(proto::OpenBufferForSymbol {
2681 project_id,
2682 symbol: Some(serialize_symbol(symbol)),
2683 });
2684 cx.spawn(|this, mut cx| async move {
2685 let response = request.await?;
2686 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2687 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2688 .await
2689 })
2690 } else {
2691 Task::ready(Err(anyhow!("project does not have a remote id")))
2692 }
2693 }
2694
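    /// Requests completions at the given position. For a local buffer, the language
    /// server is queried directly and each completion's replacement range is either
    /// validated against the buffer or inferred from the word containing the cursor;
    /// for a remote buffer the request is forwarded to the host over RPC.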
2695 pub fn completions<T: ToPointUtf16>(
2696 &self,
2697 source_buffer_handle: &ModelHandle<Buffer>,
2698 position: T,
2699 cx: &mut ModelContext<Self>,
2700 ) -> Task<Result<Vec<Completion>>> {
2701 let source_buffer_handle = source_buffer_handle.clone();
2702 let source_buffer = source_buffer_handle.read(cx);
2703 let buffer_id = source_buffer.remote_id();
2704 let language = source_buffer.language().cloned();
2705 let worktree;
2706 let buffer_abs_path;
2707 if let Some(file) = File::from_dyn(source_buffer.file()) {
2708 worktree = file.worktree.clone();
2709 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2710 } else {
2711 return Task::ready(Ok(Default::default()));
2712 };
2713
2714 let position = position.to_point_utf16(source_buffer);
2715 let anchor = source_buffer.anchor_after(position);
2716
2717 if worktree.read(cx).as_local().is_some() {
2718 let buffer_abs_path = buffer_abs_path.unwrap();
2719 let (_, lang_server) =
2720 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2721 server.clone()
2722 } else {
2723 return Task::ready(Ok(Default::default()));
2724 };
2725
2726 cx.spawn(|_, cx| async move {
2727 let completions = lang_server
2728 .request::<lsp::request::Completion>(lsp::CompletionParams {
2729 text_document_position: lsp::TextDocumentPositionParams::new(
2730 lsp::TextDocumentIdentifier::new(
2731 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2732 ),
2733 point_to_lsp(position),
2734 ),
2735 context: Default::default(),
2736 work_done_progress_params: Default::default(),
2737 partial_result_params: Default::default(),
2738 })
2739 .await
2740 .context("lsp completion request failed")?;
2741
2742 let completions = if let Some(completions) = completions {
2743 match completions {
2744 lsp::CompletionResponse::Array(completions) => completions,
2745 lsp::CompletionResponse::List(list) => list.items,
2746 }
2747 } else {
2748 Default::default()
2749 };
2750
2751 source_buffer_handle.read_with(&cx, |this, _| {
2752 let snapshot = this.snapshot();
2753 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2754 let mut range_for_token = None;
2755 Ok(completions
2756 .into_iter()
2757 .filter_map(|lsp_completion| {
2758 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2759 // If the language server provides a range to overwrite, then
2760 // check that the range is valid.
2761 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2762 let range = range_from_lsp(edit.range);
2763 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2764 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2765 if start != range.start || end != range.end {
2766 log::info!("completion out of expected range");
2767 return None;
2768 }
2769 (
2770 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2771 edit.new_text.clone(),
2772 )
2773 }
2774 // If the language server does not provide a range, then infer
2775 // the range based on the syntax tree.
2776 None => {
2777 if position != clipped_position {
2778 log::info!("completion out of expected range");
2779 return None;
2780 }
2781 let Range { start, end } = range_for_token
2782 .get_or_insert_with(|| {
2783 let offset = position.to_offset(&snapshot);
2784 snapshot
2785 .range_for_word_token_at(offset)
2786 .unwrap_or_else(|| offset..offset)
2787 })
2788 .clone();
2789 let text = lsp_completion
2790 .insert_text
2791 .as_ref()
2792 .unwrap_or(&lsp_completion.label)
2793 .clone();
2794 (
2795 snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text,
2797 )
2798 }
2799 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2800 log::info!("unsupported insert/replace completion");
2801 return None;
2802 }
2803 };
2804
2805 Some(Completion {
2806 old_range,
2807 new_text,
2808 label: language
2809 .as_ref()
2810 .and_then(|l| l.label_for_completion(&lsp_completion))
2811 .unwrap_or_else(|| {
2812 CodeLabel::plain(
2813 lsp_completion.label.clone(),
2814 lsp_completion.filter_text.as_deref(),
2815 )
2816 }),
2817 lsp_completion,
2818 })
2819 })
2820 .collect())
2821 })
2822 })
2823 } else if let Some(project_id) = self.remote_id() {
2824 let rpc = self.client.clone();
2825 let message = proto::GetCompletions {
2826 project_id,
2827 buffer_id,
2828 position: Some(language::proto::serialize_anchor(&anchor)),
2829 version: serialize_version(&source_buffer.version()),
2830 };
2831 cx.spawn_weak(|_, mut cx| async move {
2832 let response = rpc.request(message).await?;
2833
2834 source_buffer_handle
2835 .update(&mut cx, |buffer, _| {
2836 buffer.wait_for_version(deserialize_version(response.version))
2837 })
2838 .await;
2839
2840 response
2841 .completions
2842 .into_iter()
2843 .map(|completion| {
2844 language::proto::deserialize_completion(completion, language.as_ref())
2845 })
2846 .collect()
2847 })
2848 } else {
2849 Task::ready(Ok(Default::default()))
2850 }
2851 }
2852
2853 pub fn apply_additional_edits_for_completion(
2854 &self,
2855 buffer_handle: ModelHandle<Buffer>,
2856 completion: Completion,
2857 push_to_history: bool,
2858 cx: &mut ModelContext<Self>,
2859 ) -> Task<Result<Option<Transaction>>> {
2860 let buffer = buffer_handle.read(cx);
2861 let buffer_id = buffer.remote_id();
2862
2863 if self.is_local() {
2864 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2865 {
2866 server.clone()
2867 } else {
2868 return Task::ready(Ok(Default::default()));
2869 };
2870
2871 cx.spawn(|this, mut cx| async move {
2872 let resolved_completion = lang_server
2873 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2874 .await?;
2875 if let Some(edits) = resolved_completion.additional_text_edits {
2876 let edits = this
2877 .update(&mut cx, |this, cx| {
2878 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2879 })
2880 .await?;
2881 buffer_handle.update(&mut cx, |buffer, cx| {
2882 buffer.finalize_last_transaction();
2883 buffer.start_transaction();
2884 for (range, text) in edits {
2885 buffer.edit([(range, text)], cx);
2886 }
2887 let transaction = if buffer.end_transaction(cx).is_some() {
2888 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2889 if !push_to_history {
2890 buffer.forget_transaction(transaction.id);
2891 }
2892 Some(transaction)
2893 } else {
2894 None
2895 };
2896 Ok(transaction)
2897 })
2898 } else {
2899 Ok(None)
2900 }
2901 })
2902 } else if let Some(project_id) = self.remote_id() {
2903 let client = self.client.clone();
2904 cx.spawn(|_, mut cx| async move {
2905 let response = client
2906 .request(proto::ApplyCompletionAdditionalEdits {
2907 project_id,
2908 buffer_id,
2909 completion: Some(language::proto::serialize_completion(&completion)),
2910 })
2911 .await?;
2912
2913 if let Some(transaction) = response.transaction {
2914 let transaction = language::proto::deserialize_transaction(transaction)?;
2915 buffer_handle
2916 .update(&mut cx, |buffer, _| {
2917 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2918 })
2919 .await;
2920 if push_to_history {
2921 buffer_handle.update(&mut cx, |buffer, _| {
2922 buffer.push_transaction(transaction.clone(), Instant::now());
2923 });
2924 }
2925 Ok(Some(transaction))
2926 } else {
2927 Ok(None)
2928 }
2929 })
2930 } else {
2931 Task::ready(Err(anyhow!("project does not have a remote id")))
2932 }
2933 }
2934
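    /// Requests code actions for the given range, passing the diagnostics that overlap
    /// the range as context. Local buffers query their language server directly; remote
    /// buffers forward the request to the host over RPC.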
2935 pub fn code_actions<T: Clone + ToOffset>(
2936 &self,
2937 buffer_handle: &ModelHandle<Buffer>,
2938 range: Range<T>,
2939 cx: &mut ModelContext<Self>,
2940 ) -> Task<Result<Vec<CodeAction>>> {
2941 let buffer_handle = buffer_handle.clone();
2942 let buffer = buffer_handle.read(cx);
2943 let snapshot = buffer.snapshot();
2944 let relevant_diagnostics = snapshot
2945 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2946 .map(|entry| entry.to_lsp_diagnostic_stub())
2947 .collect();
2948 let buffer_id = buffer.remote_id();
2949 let worktree;
2950 let buffer_abs_path;
2951 if let Some(file) = File::from_dyn(buffer.file()) {
2952 worktree = file.worktree.clone();
2953 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2954 } else {
2955 return Task::ready(Ok(Default::default()));
2956 };
2957 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2958
2959 if worktree.read(cx).as_local().is_some() {
2960 let buffer_abs_path = buffer_abs_path.unwrap();
2961 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2962 {
2963 server.clone()
2964 } else {
2965 return Task::ready(Ok(Default::default()));
2966 };
2967
2968 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2969 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2971 return Ok(Default::default());
2972 }
2973
2974 Ok(lang_server
2975 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2976 text_document: lsp::TextDocumentIdentifier::new(
2977 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2978 ),
2979 range: lsp_range,
2980 work_done_progress_params: Default::default(),
2981 partial_result_params: Default::default(),
2982 context: lsp::CodeActionContext {
2983 diagnostics: relevant_diagnostics,
2984 only: Some(vec![
2985 lsp::CodeActionKind::QUICKFIX,
2986 lsp::CodeActionKind::REFACTOR,
2987 lsp::CodeActionKind::REFACTOR_EXTRACT,
2988 lsp::CodeActionKind::SOURCE,
2989 ]),
2990 },
2991 })
2992 .await?
2993 .unwrap_or_default()
2994 .into_iter()
2995 .filter_map(|entry| {
2996 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2997 Some(CodeAction {
2998 range: range.clone(),
2999 lsp_action,
3000 })
3001 } else {
3002 None
3003 }
3004 })
3005 .collect())
3006 })
3007 } else if let Some(project_id) = self.remote_id() {
3008 let rpc = self.client.clone();
3009 let version = buffer.version();
3010 cx.spawn_weak(|_, mut cx| async move {
3011 let response = rpc
3012 .request(proto::GetCodeActions {
3013 project_id,
3014 buffer_id,
3015 start: Some(language::proto::serialize_anchor(&range.start)),
3016 end: Some(language::proto::serialize_anchor(&range.end)),
3017 version: serialize_version(&version),
3018 })
3019 .await?;
3020
3021 buffer_handle
3022 .update(&mut cx, |buffer, _| {
3023 buffer.wait_for_version(deserialize_version(response.version))
3024 })
3025 .await;
3026
3027 response
3028 .actions
3029 .into_iter()
3030 .map(language::proto::deserialize_code_action)
3031 .collect()
3032 })
3033 } else {
3034 Task::ready(Ok(Default::default()))
3035 }
3036 }
3037
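    /// Applies a code action. If the action carries resolve data, its range is
    /// refreshed and the action is resolved with the server; otherwise the actions for
    /// its range are re-requested and matched by title. The action's workspace edit is
    /// then applied, or its command is executed and the workspace edits reported by the
    /// server are collected into the returned `ProjectTransaction`.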
3038 pub fn apply_code_action(
3039 &self,
3040 buffer_handle: ModelHandle<Buffer>,
3041 mut action: CodeAction,
3042 push_to_history: bool,
3043 cx: &mut ModelContext<Self>,
3044 ) -> Task<Result<ProjectTransaction>> {
3045 if self.is_local() {
3046 let buffer = buffer_handle.read(cx);
3047 let (lsp_adapter, lang_server) =
3048 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3049 server.clone()
3050 } else {
3051 return Task::ready(Ok(Default::default()));
3052 };
3053 let range = action.range.to_point_utf16(buffer);
3054
3055 cx.spawn(|this, mut cx| async move {
3056 if let Some(lsp_range) = action
3057 .lsp_action
3058 .data
3059 .as_mut()
3060 .and_then(|d| d.get_mut("codeActionParams"))
3061 .and_then(|d| d.get_mut("range"))
3062 {
3063 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3064 action.lsp_action = lang_server
3065 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3066 .await?;
3067 } else {
3068 let actions = this
3069 .update(&mut cx, |this, cx| {
3070 this.code_actions(&buffer_handle, action.range, cx)
3071 })
3072 .await?;
3073 action.lsp_action = actions
3074 .into_iter()
3075 .find(|a| a.lsp_action.title == action.lsp_action.title)
3076 .ok_or_else(|| anyhow!("code action is outdated"))?
3077 .lsp_action;
3078 }
3079
3080 if let Some(edit) = action.lsp_action.edit {
3081 Self::deserialize_workspace_edit(
3082 this,
3083 edit,
3084 push_to_history,
3085 lsp_adapter,
3086 lang_server,
3087 &mut cx,
3088 )
3089 .await
3090 } else if let Some(command) = action.lsp_action.command {
3091 this.update(&mut cx, |this, _| {
3092 this.last_workspace_edits_by_language_server
3093 .remove(&lang_server.server_id());
3094 });
3095 lang_server
3096 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3097 command: command.command,
3098 arguments: command.arguments.unwrap_or_default(),
3099 ..Default::default()
3100 })
3101 .await?;
3102 Ok(this.update(&mut cx, |this, _| {
3103 this.last_workspace_edits_by_language_server
3104 .remove(&lang_server.server_id())
3105 .unwrap_or_default()
3106 }))
3107 } else {
3108 Ok(ProjectTransaction::default())
3109 }
3110 })
3111 } else if let Some(project_id) = self.remote_id() {
3112 let client = self.client.clone();
3113 let request = proto::ApplyCodeAction {
3114 project_id,
3115 buffer_id: buffer_handle.read(cx).remote_id(),
3116 action: Some(language::proto::serialize_code_action(&action)),
3117 };
3118 cx.spawn(|this, mut cx| async move {
3119 let response = client
3120 .request(request)
3121 .await?
3122 .transaction
3123 .ok_or_else(|| anyhow!("missing transaction"))?;
3124 this.update(&mut cx, |this, cx| {
3125 this.deserialize_project_transaction(response, push_to_history, cx)
3126 })
3127 .await
3128 })
3129 } else {
3130 Task::ready(Err(anyhow!("project does not have a remote id")))
3131 }
3132 }
3133
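    /// Applies an LSP `WorkspaceEdit` to the project: resource operations create,
    /// rename, or delete files on disk, and text document edits are applied to the
    /// corresponding buffers. The per-buffer transactions are returned as a
    /// `ProjectTransaction`.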
3134 async fn deserialize_workspace_edit(
3135 this: ModelHandle<Self>,
3136 edit: lsp::WorkspaceEdit,
3137 push_to_history: bool,
3138 lsp_adapter: Arc<dyn LspAdapter>,
3139 language_server: Arc<LanguageServer>,
3140 cx: &mut AsyncAppContext,
3141 ) -> Result<ProjectTransaction> {
3142 let fs = this.read_with(cx, |this, _| this.fs.clone());
3143 let mut operations = Vec::new();
3144 if let Some(document_changes) = edit.document_changes {
3145 match document_changes {
3146 lsp::DocumentChanges::Edits(edits) => {
3147 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3148 }
3149 lsp::DocumentChanges::Operations(ops) => operations = ops,
3150 }
3151 } else if let Some(changes) = edit.changes {
3152 operations.extend(changes.into_iter().map(|(uri, edits)| {
3153 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3154 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3155 uri,
3156 version: None,
3157 },
3158 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3159 })
3160 }));
3161 }
3162
3163 let mut project_transaction = ProjectTransaction::default();
3164 for operation in operations {
3165 match operation {
3166 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3167 let abs_path = op
3168 .uri
3169 .to_file_path()
3170 .map_err(|_| anyhow!("can't convert URI to path"))?;
3171
3172 if let Some(parent_path) = abs_path.parent() {
3173 fs.create_dir(parent_path).await?;
3174 }
3175 if abs_path.ends_with("/") {
3176 fs.create_dir(&abs_path).await?;
3177 } else {
3178 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3179 .await?;
3180 }
3181 }
3182 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3183 let source_abs_path = op
3184 .old_uri
3185 .to_file_path()
3186 .map_err(|_| anyhow!("can't convert URI to path"))?;
3187 let target_abs_path = op
3188 .new_uri
3189 .to_file_path()
3190 .map_err(|_| anyhow!("can't convert URI to path"))?;
3191 fs.rename(
3192 &source_abs_path,
3193 &target_abs_path,
3194 op.options.map(Into::into).unwrap_or_default(),
3195 )
3196 .await?;
3197 }
3198 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3199 let abs_path = op
3200 .uri
3201 .to_file_path()
3202 .map_err(|_| anyhow!("can't convert URI to path"))?;
3203 let options = op.options.map(Into::into).unwrap_or_default();
3204 if abs_path.ends_with("/") {
3205 fs.remove_dir(&abs_path, options).await?;
3206 } else {
3207 fs.remove_file(&abs_path, options).await?;
3208 }
3209 }
3210 lsp::DocumentChangeOperation::Edit(op) => {
3211 let buffer_to_edit = this
3212 .update(cx, |this, cx| {
3213 this.open_local_buffer_via_lsp(
3214 op.text_document.uri,
3215 lsp_adapter.clone(),
3216 language_server.clone(),
3217 cx,
3218 )
3219 })
3220 .await?;
3221
3222 let edits = this
3223 .update(cx, |this, cx| {
3224 let edits = op.edits.into_iter().map(|edit| match edit {
3225 lsp::OneOf::Left(edit) => edit,
3226 lsp::OneOf::Right(edit) => edit.text_edit,
3227 });
3228 this.edits_from_lsp(
3229 &buffer_to_edit,
3230 edits,
3231 op.text_document.version,
3232 cx,
3233 )
3234 })
3235 .await?;
3236
3237 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3238 buffer.finalize_last_transaction();
3239 buffer.start_transaction();
3240 for (range, text) in edits {
3241 buffer.edit([(range, text)], cx);
3242 }
3243 let transaction = if buffer.end_transaction(cx).is_some() {
3244 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3245 if !push_to_history {
3246 buffer.forget_transaction(transaction.id);
3247 }
3248 Some(transaction)
3249 } else {
3250 None
3251 };
3252
3253 transaction
3254 });
3255 if let Some(transaction) = transaction {
3256 project_transaction.0.insert(buffer_to_edit, transaction);
3257 }
3258 }
3259 }
3260 }
3261
3262 Ok(project_transaction)
3263 }
3264
3265 pub fn prepare_rename<T: ToPointUtf16>(
3266 &self,
3267 buffer: ModelHandle<Buffer>,
3268 position: T,
3269 cx: &mut ModelContext<Self>,
3270 ) -> Task<Result<Option<Range<Anchor>>>> {
3271 let position = position.to_point_utf16(buffer.read(cx));
3272 self.request_lsp(buffer, PrepareRename { position }, cx)
3273 }
3274
3275 pub fn perform_rename<T: ToPointUtf16>(
3276 &self,
3277 buffer: ModelHandle<Buffer>,
3278 position: T,
3279 new_name: String,
3280 push_to_history: bool,
3281 cx: &mut ModelContext<Self>,
3282 ) -> Task<Result<ProjectTransaction>> {
3283 let position = position.to_point_utf16(buffer.read(cx));
3284 self.request_lsp(
3285 buffer,
3286 PerformRename {
3287 position,
3288 new_name,
3289 push_to_history,
3290 },
3291 cx,
3292 )
3293 }
3294
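    /// Searches the project for the given query. For local projects, background workers
    /// scan the visible files of each worktree for candidate paths, the candidates
    /// (along with already-open buffers) are loaded as buffers, and match ranges are
    /// collected per buffer; for remote projects the host performs the search over RPC.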
3295 pub fn search(
3296 &self,
3297 query: SearchQuery,
3298 cx: &mut ModelContext<Self>,
3299 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3300 if self.is_local() {
3301 let snapshots = self
3302 .visible_worktrees(cx)
3303 .filter_map(|tree| {
3304 let tree = tree.read(cx).as_local()?;
3305 Some(tree.snapshot())
3306 })
3307 .collect::<Vec<_>>();
3308
3309 let background = cx.background().clone();
3310 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3311 if path_count == 0 {
3312 return Task::ready(Ok(Default::default()));
3313 }
3314 let workers = background.num_cpus().min(path_count);
3315 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3316 cx.background()
3317 .spawn({
3318 let fs = self.fs.clone();
3319 let background = cx.background().clone();
3320 let query = query.clone();
3321 async move {
3322 let fs = &fs;
3323 let query = &query;
3324 let matching_paths_tx = &matching_paths_tx;
3325 let paths_per_worker = (path_count + workers - 1) / workers;
3326 let snapshots = &snapshots;
3327 background
3328 .scoped(|scope| {
3329 for worker_ix in 0..workers {
3330 let worker_start_ix = worker_ix * paths_per_worker;
3331 let worker_end_ix = worker_start_ix + paths_per_worker;
3332 scope.spawn(async move {
3333 let mut snapshot_start_ix = 0;
3334 let mut abs_path = PathBuf::new();
3335 for snapshot in snapshots {
3336 let snapshot_end_ix =
3337 snapshot_start_ix + snapshot.visible_file_count();
3338 if worker_end_ix <= snapshot_start_ix {
3339 break;
3340 } else if worker_start_ix > snapshot_end_ix {
3341 snapshot_start_ix = snapshot_end_ix;
3342 continue;
3343 } else {
3344 let start_in_snapshot = worker_start_ix
3345 .saturating_sub(snapshot_start_ix);
3346 let end_in_snapshot =
3347 cmp::min(worker_end_ix, snapshot_end_ix)
3348 - snapshot_start_ix;
3349
3350 for entry in snapshot
3351 .files(false, start_in_snapshot)
3352 .take(end_in_snapshot - start_in_snapshot)
3353 {
3354 if matching_paths_tx.is_closed() {
3355 break;
3356 }
3357
3358 abs_path.clear();
3359 abs_path.push(&snapshot.abs_path());
3360 abs_path.push(&entry.path);
3361 let matches = if let Some(file) =
3362 fs.open_sync(&abs_path).await.log_err()
3363 {
3364 query.detect(file).unwrap_or(false)
3365 } else {
3366 false
3367 };
3368
3369 if matches {
3370 let project_path =
3371 (snapshot.id(), entry.path.clone());
3372 if matching_paths_tx
3373 .send(project_path)
3374 .await
3375 .is_err()
3376 {
3377 break;
3378 }
3379 }
3380 }
3381
3382 snapshot_start_ix = snapshot_end_ix;
3383 }
3384 }
3385 });
3386 }
3387 })
3388 .await;
3389 }
3390 })
3391 .detach();
3392
3393 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3394 let open_buffers = self
3395 .opened_buffers
3396 .values()
3397 .filter_map(|b| b.upgrade(cx))
3398 .collect::<HashSet<_>>();
3399 cx.spawn(|this, cx| async move {
3400 for buffer in &open_buffers {
3401 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3402 buffers_tx.send((buffer.clone(), snapshot)).await?;
3403 }
3404
3405 let open_buffers = Rc::new(RefCell::new(open_buffers));
3406 while let Some(project_path) = matching_paths_rx.next().await {
3407 if buffers_tx.is_closed() {
3408 break;
3409 }
3410
3411 let this = this.clone();
3412 let open_buffers = open_buffers.clone();
3413 let buffers_tx = buffers_tx.clone();
3414 cx.spawn(|mut cx| async move {
3415 if let Some(buffer) = this
3416 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3417 .await
3418 .log_err()
3419 {
3420 if open_buffers.borrow_mut().insert(buffer.clone()) {
3421 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3422 buffers_tx.send((buffer, snapshot)).await?;
3423 }
3424 }
3425
3426 Ok::<_, anyhow::Error>(())
3427 })
3428 .detach();
3429 }
3430
3431 Ok::<_, anyhow::Error>(())
3432 })
3433 .detach_and_log_err(cx);
3434
3435 let background = cx.background().clone();
3436 cx.background().spawn(async move {
3437 let query = &query;
3438 let mut matched_buffers = Vec::new();
3439 for _ in 0..workers {
3440 matched_buffers.push(HashMap::default());
3441 }
3442 background
3443 .scoped(|scope| {
3444 for worker_matched_buffers in matched_buffers.iter_mut() {
3445 let mut buffers_rx = buffers_rx.clone();
3446 scope.spawn(async move {
3447 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3448 let buffer_matches = query
3449 .search(snapshot.as_rope())
3450 .await
3451 .iter()
3452 .map(|range| {
3453 snapshot.anchor_before(range.start)
3454 ..snapshot.anchor_after(range.end)
3455 })
3456 .collect::<Vec<_>>();
3457 if !buffer_matches.is_empty() {
3458 worker_matched_buffers
3459 .insert(buffer.clone(), buffer_matches);
3460 }
3461 }
3462 });
3463 }
3464 })
3465 .await;
3466 Ok(matched_buffers.into_iter().flatten().collect())
3467 })
3468 } else if let Some(project_id) = self.remote_id() {
3469 let request = self.client.request(query.to_proto(project_id));
3470 cx.spawn(|this, mut cx| async move {
3471 let response = request.await?;
3472 let mut result = HashMap::default();
3473 for location in response.locations {
3474 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3475 let target_buffer = this
3476 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3477 .await?;
3478 let start = location
3479 .start
3480 .and_then(deserialize_anchor)
3481 .ok_or_else(|| anyhow!("missing target start"))?;
3482 let end = location
3483 .end
3484 .and_then(deserialize_anchor)
3485 .ok_or_else(|| anyhow!("missing target end"))?;
3486 result
3487 .entry(target_buffer)
3488 .or_insert(Vec::new())
3489 .push(start..end)
3490 }
3491 Ok(result)
3492 })
3493 } else {
3494 Task::ready(Ok(Default::default()))
3495 }
3496 }
3497
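    /// Dispatches a typed request either to the language server associated with the
    /// buffer (for local files) or to the host over RPC (for remote projects). If no
    /// server or remote connection is available, a default response is returned.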
3498 fn request_lsp<R: LspCommand>(
3499 &self,
3500 buffer_handle: ModelHandle<Buffer>,
3501 request: R,
3502 cx: &mut ModelContext<Self>,
3503 ) -> Task<Result<R::Response>>
3504 where
3505 <R::LspRequest as lsp::request::Request>::Result: Send,
3506 {
3507 let buffer = buffer_handle.read(cx);
3508 if self.is_local() {
3509 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3510 if let Some((file, (_, language_server))) =
3511 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3512 {
3513 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3514 return cx.spawn(|this, cx| async move {
3515 if !request.check_capabilities(&language_server.capabilities()) {
3516 return Ok(Default::default());
3517 }
3518
3519 let response = language_server
3520 .request::<R::LspRequest>(lsp_params)
3521 .await
3522 .context("lsp request failed")?;
3523 request
3524 .response_from_lsp(response, this, buffer_handle, cx)
3525 .await
3526 });
3527 }
3528 } else if let Some(project_id) = self.remote_id() {
3529 let rpc = self.client.clone();
3530 let message = request.to_proto(project_id, buffer);
3531 return cx.spawn(|this, cx| async move {
3532 let response = rpc.request(message).await?;
3533 request
3534 .response_from_proto(response, this, buffer_handle, cx)
3535 .await
3536 });
3537 }
3538 Task::ready(Ok(Default::default()))
3539 }
3540
3541 pub fn find_or_create_local_worktree(
3542 &mut self,
3543 abs_path: impl AsRef<Path>,
3544 visible: bool,
3545 cx: &mut ModelContext<Self>,
3546 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3547 let abs_path = abs_path.as_ref();
3548 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3549 Task::ready(Ok((tree.clone(), relative_path.into())))
3550 } else {
3551 let worktree = self.create_local_worktree(abs_path, visible, cx);
3552 cx.foreground()
3553 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3554 }
3555 }
3556
3557 pub fn find_local_worktree(
3558 &self,
3559 abs_path: &Path,
3560 cx: &AppContext,
3561 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3562 for tree in self.worktrees(cx) {
3563 if let Some(relative_path) = tree
3564 .read(cx)
3565 .as_local()
3566 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3567 {
3568 return Some((tree.clone(), relative_path.into()));
3569 }
3570 }
3571 None
3572 }
3573
3574 pub fn is_shared(&self) -> bool {
3575 match &self.client_state {
3576 ProjectClientState::Local { is_shared, .. } => *is_shared,
3577 ProjectClientState::Remote { .. } => false,
3578 }
3579 }
3580
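    /// Creates a local worktree for the given path, deduplicating concurrent requests
    /// for the same path. Once created, the worktree is added to the project and, if
    /// the project has a remote id, it is shared or registered with the server
    /// depending on whether the project is currently shared.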
3581 fn create_local_worktree(
3582 &mut self,
3583 abs_path: impl AsRef<Path>,
3584 visible: bool,
3585 cx: &mut ModelContext<Self>,
3586 ) -> Task<Result<ModelHandle<Worktree>>> {
3587 let fs = self.fs.clone();
3588 let client = self.client.clone();
3589 let next_entry_id = self.next_entry_id.clone();
3590 let path: Arc<Path> = abs_path.as_ref().into();
3591 let task = self
3592 .loading_local_worktrees
3593 .entry(path.clone())
3594 .or_insert_with(|| {
3595 cx.spawn(|project, mut cx| {
3596 async move {
3597 let worktree = Worktree::local(
3598 client.clone(),
3599 path.clone(),
3600 visible,
3601 fs,
3602 next_entry_id,
3603 &mut cx,
3604 )
3605 .await;
3606 project.update(&mut cx, |project, _| {
3607 project.loading_local_worktrees.remove(&path);
3608 });
3609 let worktree = worktree?;
3610
3611 let remote_project_id = project.update(&mut cx, |project, cx| {
3612 project.add_worktree(&worktree, cx);
3613 project.remote_id()
3614 });
3615
3616 if let Some(project_id) = remote_project_id {
3617 // Because sharing is async, we may have *unshared* the project by the time it completes,
3618 // in which case we need to register the worktree instead.
3619 loop {
3620 if project.read_with(&cx, |project, _| project.is_shared()) {
3621 if worktree
3622 .update(&mut cx, |worktree, cx| {
3623 worktree.as_local_mut().unwrap().share(project_id, cx)
3624 })
3625 .await
3626 .is_ok()
3627 {
3628 break;
3629 }
3630 } else {
3631 worktree
3632 .update(&mut cx, |worktree, cx| {
3633 worktree
3634 .as_local_mut()
3635 .unwrap()
3636 .register(project_id, cx)
3637 })
3638 .await?;
3639 break;
3640 }
3641 }
3642 }
3643
3644 Ok(worktree)
3645 }
                    .map_err(Arc::new)
3647 })
3648 .shared()
3649 })
3650 .clone();
3651 cx.foreground().spawn(async move {
3652 match task.await {
3653 Ok(worktree) => Ok(worktree),
3654 Err(err) => Err(anyhow!("{}", err)),
3655 }
3656 })
3657 }
3658
3659 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3660 self.worktrees.retain(|worktree| {
3661 if let Some(worktree) = worktree.upgrade(cx) {
3662 let id = worktree.read(cx).id();
3663 if id == id_to_remove {
3664 cx.emit(Event::WorktreeRemoved(id));
3665 false
3666 } else {
3667 true
3668 }
3669 } else {
3670 false
3671 }
3672 });
3673 cx.notify();
3674 }
3675
3676 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3677 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3678 if worktree.read(cx).is_local() {
3679 cx.subscribe(&worktree, |this, worktree, _, cx| {
3680 this.update_local_worktree_buffers(worktree, cx);
3681 })
3682 .detach();
3683 }
3684
3685 let push_strong_handle = {
3686 let worktree = worktree.read(cx);
3687 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3688 };
3689 if push_strong_handle {
3690 self.worktrees
3691 .push(WorktreeHandle::Strong(worktree.clone()));
3692 } else {
3693 cx.observe_release(&worktree, |this, _, cx| {
3694 this.worktrees
3695 .retain(|worktree| worktree.upgrade(cx).is_some());
3696 cx.notify();
3697 })
3698 .detach();
3699 self.worktrees
3700 .push(WorktreeHandle::Weak(worktree.downgrade()));
3701 }
3702 cx.emit(Event::WorktreeAdded);
3703 cx.notify();
3704 }
3705
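    /// Responds to a change in a local worktree by refreshing the `File` associated
    /// with each open buffer in that worktree. Collaborators are notified of the
    /// updated file metadata, dropped buffers are pruned, and buffers whose paths
    /// changed are re-registered with the appropriate language servers.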
3706 fn update_local_worktree_buffers(
3707 &mut self,
3708 worktree_handle: ModelHandle<Worktree>,
3709 cx: &mut ModelContext<Self>,
3710 ) {
3711 let snapshot = worktree_handle.read(cx).snapshot();
3712 let mut buffers_to_delete = Vec::new();
3713 let mut renamed_buffers = Vec::new();
3714 for (buffer_id, buffer) in &self.opened_buffers {
3715 if let Some(buffer) = buffer.upgrade(cx) {
3716 buffer.update(cx, |buffer, cx| {
3717 if let Some(old_file) = File::from_dyn(buffer.file()) {
3718 if old_file.worktree != worktree_handle {
3719 return;
3720 }
3721
3722 let new_file = if let Some(entry) = old_file
3723 .entry_id
3724 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3725 {
3726 File {
3727 is_local: true,
3728 entry_id: Some(entry.id),
3729 mtime: entry.mtime,
3730 path: entry.path.clone(),
3731 worktree: worktree_handle.clone(),
3732 }
3733 } else if let Some(entry) =
3734 snapshot.entry_for_path(old_file.path().as_ref())
3735 {
3736 File {
3737 is_local: true,
3738 entry_id: Some(entry.id),
3739 mtime: entry.mtime,
3740 path: entry.path.clone(),
3741 worktree: worktree_handle.clone(),
3742 }
3743 } else {
3744 File {
3745 is_local: true,
3746 entry_id: None,
3747 path: old_file.path().clone(),
3748 mtime: old_file.mtime(),
3749 worktree: worktree_handle.clone(),
3750 }
3751 };
3752
3753 let old_path = old_file.abs_path(cx);
3754 if new_file.abs_path(cx) != old_path {
3755 renamed_buffers.push((cx.handle(), old_path));
3756 }
3757
3758 if let Some(project_id) = self.remote_id() {
3759 self.client
3760 .send(proto::UpdateBufferFile {
3761 project_id,
3762 buffer_id: *buffer_id as u64,
3763 file: Some(new_file.to_proto()),
3764 })
3765 .log_err();
3766 }
3767 buffer.file_updated(Box::new(new_file), cx).detach();
3768 }
3769 });
3770 } else {
3771 buffers_to_delete.push(*buffer_id);
3772 }
3773 }
3774
3775 for buffer_id in buffers_to_delete {
3776 self.opened_buffers.remove(&buffer_id);
3777 }
3778
3779 for (buffer, old_path) in renamed_buffers {
3780 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3781 self.assign_language_to_buffer(&buffer, cx);
3782 self.register_buffer_with_language_server(&buffer, cx);
3783 }
3784 }
3785
3786 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3787 let new_active_entry = entry.and_then(|project_path| {
3788 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3789 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3790 Some(entry.id)
3791 });
3792 if new_active_entry != self.active_entry {
3793 self.active_entry = new_active_entry;
3794 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3795 }
3796 }
3797
3798 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3799 self.language_server_statuses
3800 .values()
3801 .any(|status| status.pending_diagnostic_updates > 0)
3802 }
3803
3804 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3805 let mut summary = DiagnosticSummary::default();
3806 for (_, path_summary) in self.diagnostic_summaries(cx) {
3807 summary.error_count += path_summary.error_count;
3808 summary.warning_count += path_summary.warning_count;
3809 }
3810 summary
3811 }
3812
3813 pub fn diagnostic_summaries<'a>(
3814 &'a self,
3815 cx: &'a AppContext,
3816 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3817 self.worktrees(cx).flat_map(move |worktree| {
3818 let worktree = worktree.read(cx);
3819 let worktree_id = worktree.id();
3820 worktree
3821 .diagnostic_summaries()
3822 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3823 })
3824 }
3825
3826 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3827 if self
3828 .language_server_statuses
3829 .values()
3830 .map(|status| status.pending_diagnostic_updates)
3831 .sum::<isize>()
3832 == 1
3833 {
3834 cx.emit(Event::DiskBasedDiagnosticsStarted);
3835 }
3836 }
3837
3838 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3839 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3840 if self
3841 .language_server_statuses
3842 .values()
3843 .map(|status| status.pending_diagnostic_updates)
3844 .sum::<isize>()
3845 == 0
3846 {
3847 cx.emit(Event::DiskBasedDiagnosticsFinished);
3848 }
3849 }
3850
3851 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3852 self.active_entry
3853 }
3854
3855 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3856 self.worktree_for_id(path.worktree_id, cx)?
3857 .read(cx)
3858 .entry_for_path(&path.path)
3859 .map(|entry| entry.id)
3860 }
3861
3862 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3863 let worktree = self.worktree_for_entry(entry_id, cx)?;
3864 let worktree = worktree.read(cx);
3865 let worktree_id = worktree.id();
3866 let path = worktree.entry_for_id(entry_id)?.path.clone();
3867 Some(ProjectPath { worktree_id, path })
3868 }
3869
3870 // RPC message handlers
3871
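    // Automatically approves join requests from users who are already collaborators;
    // otherwise surfaces the request to the UI via `Event::ContactRequestedJoin`.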
3872 async fn handle_request_join_project(
3873 this: ModelHandle<Self>,
3874 message: TypedEnvelope<proto::RequestJoinProject>,
3875 _: Arc<Client>,
3876 mut cx: AsyncAppContext,
3877 ) -> Result<()> {
3878 let user_id = message.payload.requester_id;
3879 if this.read_with(&cx, |project, _| {
3880 project.collaborators.values().any(|c| c.user.id == user_id)
3881 }) {
3882 this.update(&mut cx, |this, cx| {
3883 this.respond_to_join_request(user_id, true, cx)
3884 });
3885 } else {
3886 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3887 let user = user_store
3888 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3889 .await?;
3890 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3891 }
3892 Ok(())
3893 }
3894
3895 async fn handle_unregister_project(
3896 this: ModelHandle<Self>,
3897 _: TypedEnvelope<proto::UnregisterProject>,
3898 _: Arc<Client>,
3899 mut cx: AsyncAppContext,
3900 ) -> Result<()> {
3901 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3902 Ok(())
3903 }
3904
3905 async fn handle_project_unshared(
3906 this: ModelHandle<Self>,
3907 _: TypedEnvelope<proto::ProjectUnshared>,
3908 _: Arc<Client>,
3909 mut cx: AsyncAppContext,
3910 ) -> Result<()> {
3911 this.update(&mut cx, |this, cx| this.unshared(cx));
3912 Ok(())
3913 }
3914
3915 async fn handle_add_collaborator(
3916 this: ModelHandle<Self>,
3917 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3918 _: Arc<Client>,
3919 mut cx: AsyncAppContext,
3920 ) -> Result<()> {
3921 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3922 let collaborator = envelope
3923 .payload
3924 .collaborator
3925 .take()
3926 .ok_or_else(|| anyhow!("empty collaborator"))?;
3927
3928 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3929 this.update(&mut cx, |this, cx| {
3930 this.collaborators
3931 .insert(collaborator.peer_id, collaborator);
3932 cx.notify();
3933 });
3934
3935 Ok(())
3936 }
3937
3938 async fn handle_remove_collaborator(
3939 this: ModelHandle<Self>,
3940 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3941 _: Arc<Client>,
3942 mut cx: AsyncAppContext,
3943 ) -> Result<()> {
3944 this.update(&mut cx, |this, cx| {
3945 let peer_id = PeerId(envelope.payload.peer_id);
3946 let replica_id = this
3947 .collaborators
3948 .remove(&peer_id)
3949 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3950 .replica_id;
3951 for (_, buffer) in &this.opened_buffers {
3952 if let Some(buffer) = buffer.upgrade(cx) {
3953 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3954 }
3955 }
3956
3957 cx.emit(Event::CollaboratorLeft(peer_id));
3958 cx.notify();
3959 Ok(())
3960 })
3961 }
3962
3963 async fn handle_join_project_request_cancelled(
3964 this: ModelHandle<Self>,
3965 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3966 _: Arc<Client>,
3967 mut cx: AsyncAppContext,
3968 ) -> Result<()> {
3969 let user = this
3970 .update(&mut cx, |this, cx| {
3971 this.user_store.update(cx, |user_store, cx| {
3972 user_store.fetch_user(envelope.payload.requester_id, cx)
3973 })
3974 })
3975 .await?;
3976
3977 this.update(&mut cx, |_, cx| {
3978 cx.emit(Event::ContactCancelledJoinRequest(user));
3979 });
3980
3981 Ok(())
3982 }
3983
3984 async fn handle_register_worktree(
3985 this: ModelHandle<Self>,
3986 envelope: TypedEnvelope<proto::RegisterWorktree>,
3987 client: Arc<Client>,
3988 mut cx: AsyncAppContext,
3989 ) -> Result<()> {
3990 this.update(&mut cx, |this, cx| {
3991 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3992 let replica_id = this.replica_id();
3993 let worktree = proto::Worktree {
3994 id: envelope.payload.worktree_id,
3995 root_name: envelope.payload.root_name,
3996 entries: Default::default(),
3997 diagnostic_summaries: Default::default(),
3998 visible: envelope.payload.visible,
3999 scan_id: 0,
4000 };
4001 let (worktree, load_task) =
4002 Worktree::remote(remote_id, replica_id, worktree, client, cx);
4003 this.add_worktree(&worktree, cx);
4004 load_task.detach();
4005 Ok(())
4006 })
4007 }
4008
4009 async fn handle_unregister_worktree(
4010 this: ModelHandle<Self>,
4011 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4012 _: Arc<Client>,
4013 mut cx: AsyncAppContext,
4014 ) -> Result<()> {
4015 this.update(&mut cx, |this, cx| {
4016 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4017 this.remove_worktree(worktree_id, cx);
4018 Ok(())
4019 })
4020 }
4021
4022 async fn handle_update_worktree(
4023 this: ModelHandle<Self>,
4024 envelope: TypedEnvelope<proto::UpdateWorktree>,
4025 _: Arc<Client>,
4026 mut cx: AsyncAppContext,
4027 ) -> Result<()> {
4028 this.update(&mut cx, |this, cx| {
4029 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4030 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4031 worktree.update(cx, |worktree, _| {
4032 let worktree = worktree.as_remote_mut().unwrap();
4033 worktree.update_from_remote(envelope)
4034 })?;
4035 }
4036 Ok(())
4037 })
4038 }
4039
4040 async fn handle_create_project_entry(
4041 this: ModelHandle<Self>,
4042 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4043 _: Arc<Client>,
4044 mut cx: AsyncAppContext,
4045 ) -> Result<proto::ProjectEntryResponse> {
4046 let worktree = this.update(&mut cx, |this, cx| {
4047 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4048 this.worktree_for_id(worktree_id, cx)
4049 .ok_or_else(|| anyhow!("worktree not found"))
4050 })?;
4051 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4052 let entry = worktree
4053 .update(&mut cx, |worktree, cx| {
4054 let worktree = worktree.as_local_mut().unwrap();
4055 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4056 worktree.create_entry(path, envelope.payload.is_directory, cx)
4057 })
4058 .await?;
4059 Ok(proto::ProjectEntryResponse {
4060 entry: Some((&entry).into()),
4061 child_entries: Default::default(),
4062 worktree_scan_id: worktree_scan_id as u64,
4063 })
4064 }
4065
4066 async fn handle_rename_project_entry(
4067 this: ModelHandle<Self>,
4068 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4069 _: Arc<Client>,
4070 mut cx: AsyncAppContext,
4071 ) -> Result<proto::ProjectEntryResponse> {
4072 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4073 let worktree = this.read_with(&cx, |this, cx| {
4074 this.worktree_for_entry(entry_id, cx)
4075 .ok_or_else(|| anyhow!("worktree not found"))
4076 })?;
4077 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4078 let entry = worktree
4079 .update(&mut cx, |worktree, cx| {
4080 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4081 worktree
4082 .as_local_mut()
4083 .unwrap()
4084 .rename_entry(entry_id, new_path, cx)
4085 .ok_or_else(|| anyhow!("invalid entry"))
4086 })?
4087 .await?;
4088 Ok(proto::ProjectEntryResponse {
4089 entry: Some((&entry).into()),
4090 child_entries: Default::default(),
4091 worktree_scan_id: worktree_scan_id as u64,
4092 })
4093 }
4094
4095 async fn handle_copy_project_entry(
4096 this: ModelHandle<Self>,
4097 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4098 _: Arc<Client>,
4099 mut cx: AsyncAppContext,
4100 ) -> Result<proto::ProjectEntryResponse> {
4101 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4102 let worktree = this.read_with(&cx, |this, cx| {
4103 this.worktree_for_entry(entry_id, cx)
4104 .ok_or_else(|| anyhow!("worktree not found"))
4105 })?;
4106 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4107 let (entry, child_entries) = worktree
4108 .update(&mut cx, |worktree, cx| {
4109 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4110 worktree
4111 .as_local_mut()
4112 .unwrap()
4113 .copy_entry(entry_id, new_path, cx)
4114 .ok_or_else(|| anyhow!("invalid entry"))
4115 })?
4116 .await?;
4117 Ok(proto::ProjectEntryResponse {
4118 entry: Some((&entry).into()),
4119 child_entries: child_entries.iter().map(Into::into).collect(),
4120 worktree_scan_id: worktree_scan_id as u64,
4121 })
4122 }
4123
4124 async fn handle_delete_project_entry(
4125 this: ModelHandle<Self>,
4126 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4127 _: Arc<Client>,
4128 mut cx: AsyncAppContext,
4129 ) -> Result<proto::ProjectEntryResponse> {
4130 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4131 let worktree = this.read_with(&cx, |this, cx| {
4132 this.worktree_for_entry(entry_id, cx)
4133 .ok_or_else(|| anyhow!("worktree not found"))
4134 })?;
4135 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4136 worktree
4137 .update(&mut cx, |worktree, cx| {
4138 worktree
4139 .as_local_mut()
4140 .unwrap()
4141 .delete_entry(entry_id, cx)
4142 .ok_or_else(|| anyhow!("invalid entry"))
4143 })?
4144 .await?;
4145 Ok(proto::ProjectEntryResponse {
4146 entry: None,
4147 child_entries: Default::default(),
4148 worktree_scan_id: worktree_scan_id as u64,
4149 })
4150 }
4151
4152 async fn handle_update_diagnostic_summary(
4153 this: ModelHandle<Self>,
4154 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4155 _: Arc<Client>,
4156 mut cx: AsyncAppContext,
4157 ) -> Result<()> {
4158 this.update(&mut cx, |this, cx| {
4159 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4160 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4161 if let Some(summary) = envelope.payload.summary {
4162 let project_path = ProjectPath {
4163 worktree_id,
4164 path: Path::new(&summary.path).into(),
4165 };
4166 worktree.update(cx, |worktree, _| {
4167 worktree
4168 .as_remote_mut()
4169 .unwrap()
4170 .update_diagnostic_summary(project_path.path.clone(), &summary);
4171 });
4172 cx.emit(Event::DiagnosticsUpdated(project_path));
4173 }
4174 }
4175 Ok(())
4176 })
4177 }
4178
4179 async fn handle_start_language_server(
4180 this: ModelHandle<Self>,
4181 envelope: TypedEnvelope<proto::StartLanguageServer>,
4182 _: Arc<Client>,
4183 mut cx: AsyncAppContext,
4184 ) -> Result<()> {
4185 let server = envelope
4186 .payload
4187 .server
4188 .ok_or_else(|| anyhow!("invalid server"))?;
4189 this.update(&mut cx, |this, cx| {
4190 this.language_server_statuses.insert(
4191 server.id as usize,
4192 LanguageServerStatus {
4193 name: server.name,
4194 pending_work: Default::default(),
4195 pending_diagnostic_updates: 0,
4196 },
4197 );
4198 cx.notify();
4199 });
4200 Ok(())
4201 }
4202
4203 async fn handle_update_language_server(
4204 this: ModelHandle<Self>,
4205 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4206 _: Arc<Client>,
4207 mut cx: AsyncAppContext,
4208 ) -> Result<()> {
4209 let language_server_id = envelope.payload.language_server_id as usize;
4210 match envelope
4211 .payload
4212 .variant
4213 .ok_or_else(|| anyhow!("invalid variant"))?
4214 {
4215 proto::update_language_server::Variant::WorkStart(payload) => {
4216 this.update(&mut cx, |this, cx| {
4217 this.on_lsp_work_start(language_server_id, payload.token, cx);
4218 })
4219 }
4220 proto::update_language_server::Variant::WorkProgress(payload) => {
4221 this.update(&mut cx, |this, cx| {
4222 this.on_lsp_work_progress(
4223 language_server_id,
4224 payload.token,
4225 LanguageServerProgress {
4226 message: payload.message,
4227 percentage: payload.percentage.map(|p| p as usize),
4228 last_update_at: Instant::now(),
4229 },
4230 cx,
4231 );
4232 })
4233 }
4234 proto::update_language_server::Variant::WorkEnd(payload) => {
4235 this.update(&mut cx, |this, cx| {
4236 this.on_lsp_work_end(language_server_id, payload.token, cx);
4237 })
4238 }
4239 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4240 this.update(&mut cx, |this, cx| {
4241 this.disk_based_diagnostics_started(cx);
4242 })
4243 }
4244 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4245 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4246 }
4247 }
4248
4249 Ok(())
4250 }
4251
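    // Applies incoming buffer operations. Edits targeting a buffer that is still
    // loading are queued on its `OpenBuffer::Loading` entry; on remote projects,
    // operations may arrive before the buffer itself and are buffered until it does.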
4252 async fn handle_update_buffer(
4253 this: ModelHandle<Self>,
4254 envelope: TypedEnvelope<proto::UpdateBuffer>,
4255 _: Arc<Client>,
4256 mut cx: AsyncAppContext,
4257 ) -> Result<()> {
4258 this.update(&mut cx, |this, cx| {
4259 let payload = envelope.payload.clone();
4260 let buffer_id = payload.buffer_id;
4261 let ops = payload
4262 .operations
4263 .into_iter()
                .map(language::proto::deserialize_operation)
4265 .collect::<Result<Vec<_>, _>>()?;
4266 let is_remote = this.is_remote();
4267 match this.opened_buffers.entry(buffer_id) {
4268 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4269 OpenBuffer::Strong(buffer) => {
4270 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4271 }
4272 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4273 OpenBuffer::Weak(_) => {}
4274 },
4275 hash_map::Entry::Vacant(e) => {
4276 assert!(
4277 is_remote,
4278 "received buffer update from {:?}",
4279 envelope.original_sender_id
4280 );
4281 e.insert(OpenBuffer::Loading(ops));
4282 }
4283 }
4284 Ok(())
4285 })
4286 }
4287
4288 async fn handle_update_buffer_file(
4289 this: ModelHandle<Self>,
4290 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4291 _: Arc<Client>,
4292 mut cx: AsyncAppContext,
4293 ) -> Result<()> {
4294 this.update(&mut cx, |this, cx| {
4295 let payload = envelope.payload.clone();
4296 let buffer_id = payload.buffer_id;
4297 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4298 let worktree = this
4299 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4300 .ok_or_else(|| anyhow!("no such worktree"))?;
4301 let file = File::from_proto(file, worktree.clone(), cx)?;
4302 let buffer = this
4303 .opened_buffers
4304 .get_mut(&buffer_id)
4305 .and_then(|b| b.upgrade(cx))
4306 .ok_or_else(|| anyhow!("no such buffer"))?;
4307 buffer.update(cx, |buffer, cx| {
4308 buffer.file_updated(Box::new(file), cx).detach();
4309 });
4310 Ok(())
4311 })
4312 }
4313
4314 async fn handle_save_buffer(
4315 this: ModelHandle<Self>,
4316 envelope: TypedEnvelope<proto::SaveBuffer>,
4317 _: Arc<Client>,
4318 mut cx: AsyncAppContext,
4319 ) -> Result<proto::BufferSaved> {
4320 let buffer_id = envelope.payload.buffer_id;
4321 let requested_version = deserialize_version(envelope.payload.version);
4322
4323 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4324 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4325 let buffer = this
4326 .opened_buffers
4327 .get(&buffer_id)
4328 .and_then(|buffer| buffer.upgrade(cx))
4329 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4330 Ok::<_, anyhow::Error>((project_id, buffer))
4331 })?;
4332 buffer
4333 .update(&mut cx, |buffer, _| {
4334 buffer.wait_for_version(requested_version)
4335 })
4336 .await;
4337
4338 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4339 Ok(proto::BufferSaved {
4340 project_id,
4341 buffer_id,
4342 version: serialize_version(&saved_version),
4343 mtime: Some(mtime.into()),
4344 })
4345 }
4346
4347 async fn handle_reload_buffers(
4348 this: ModelHandle<Self>,
4349 envelope: TypedEnvelope<proto::ReloadBuffers>,
4350 _: Arc<Client>,
4351 mut cx: AsyncAppContext,
4352 ) -> Result<proto::ReloadBuffersResponse> {
4353 let sender_id = envelope.original_sender_id()?;
4354 let reload = this.update(&mut cx, |this, cx| {
4355 let mut buffers = HashSet::default();
4356 for buffer_id in &envelope.payload.buffer_ids {
4357 buffers.insert(
4358 this.opened_buffers
4359 .get(buffer_id)
4360 .and_then(|buffer| buffer.upgrade(cx))
4361 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4362 );
4363 }
4364 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4365 })?;
4366
4367 let project_transaction = reload.await?;
4368 let project_transaction = this.update(&mut cx, |this, cx| {
4369 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4370 });
4371 Ok(proto::ReloadBuffersResponse {
4372 transaction: Some(project_transaction),
4373 })
4374 }
4375
4376 async fn handle_format_buffers(
4377 this: ModelHandle<Self>,
4378 envelope: TypedEnvelope<proto::FormatBuffers>,
4379 _: Arc<Client>,
4380 mut cx: AsyncAppContext,
4381 ) -> Result<proto::FormatBuffersResponse> {
4382 let sender_id = envelope.original_sender_id()?;
4383 let format = this.update(&mut cx, |this, cx| {
4384 let mut buffers = HashSet::default();
4385 for buffer_id in &envelope.payload.buffer_ids {
4386 buffers.insert(
4387 this.opened_buffers
4388 .get(buffer_id)
4389 .and_then(|buffer| buffer.upgrade(cx))
4390 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4391 );
4392 }
4393 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4394 })?;
4395
4396 let project_transaction = format.await?;
4397 let project_transaction = this.update(&mut cx, |this, cx| {
4398 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4399 });
4400 Ok(proto::FormatBuffersResponse {
4401 transaction: Some(project_transaction),
4402 })
4403 }
4404
4405 async fn handle_get_completions(
4406 this: ModelHandle<Self>,
4407 envelope: TypedEnvelope<proto::GetCompletions>,
4408 _: Arc<Client>,
4409 mut cx: AsyncAppContext,
4410 ) -> Result<proto::GetCompletionsResponse> {
4411 let position = envelope
4412 .payload
4413 .position
4414 .and_then(language::proto::deserialize_anchor)
4415 .ok_or_else(|| anyhow!("invalid position"))?;
4416 let version = deserialize_version(envelope.payload.version);
4417 let buffer = this.read_with(&cx, |this, cx| {
4418 this.opened_buffers
4419 .get(&envelope.payload.buffer_id)
4420 .and_then(|buffer| buffer.upgrade(cx))
4421 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4422 })?;
4423 buffer
4424 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4425 .await;
4426 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4427 let completions = this
4428 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4429 .await?;
4430
4431 Ok(proto::GetCompletionsResponse {
4432 completions: completions
4433 .iter()
4434 .map(language::proto::serialize_completion)
4435 .collect(),
4436 version: serialize_version(&version),
4437 })
4438 }
4439
4440 async fn handle_apply_additional_edits_for_completion(
4441 this: ModelHandle<Self>,
4442 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4443 _: Arc<Client>,
4444 mut cx: AsyncAppContext,
4445 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4446 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4447 let buffer = this
4448 .opened_buffers
4449 .get(&envelope.payload.buffer_id)
4450 .and_then(|buffer| buffer.upgrade(cx))
4451 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4452 let language = buffer.read(cx).language();
4453 let completion = language::proto::deserialize_completion(
4454 envelope
4455 .payload
4456 .completion
4457 .ok_or_else(|| anyhow!("invalid completion"))?,
4458 language,
4459 )?;
4460 Ok::<_, anyhow::Error>(
4461 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4462 )
4463 })?;
4464
4465 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4466 transaction: apply_additional_edits
4467 .await?
4468 .as_ref()
4469 .map(language::proto::serialize_transaction),
4470 })
4471 }
4472
4473 async fn handle_get_code_actions(
4474 this: ModelHandle<Self>,
4475 envelope: TypedEnvelope<proto::GetCodeActions>,
4476 _: Arc<Client>,
4477 mut cx: AsyncAppContext,
4478 ) -> Result<proto::GetCodeActionsResponse> {
4479 let start = envelope
4480 .payload
4481 .start
4482 .and_then(language::proto::deserialize_anchor)
4483 .ok_or_else(|| anyhow!("invalid start"))?;
4484 let end = envelope
4485 .payload
4486 .end
4487 .and_then(language::proto::deserialize_anchor)
4488 .ok_or_else(|| anyhow!("invalid end"))?;
4489 let buffer = this.update(&mut cx, |this, cx| {
4490 this.opened_buffers
4491 .get(&envelope.payload.buffer_id)
4492 .and_then(|buffer| buffer.upgrade(cx))
4493 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4494 })?;
4495 buffer
4496 .update(&mut cx, |buffer, _| {
4497 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4498 })
4499 .await;
4500
4501 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4502 let code_actions = this.update(&mut cx, |this, cx| {
4503 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4504 })?;
4505
4506 Ok(proto::GetCodeActionsResponse {
4507 actions: code_actions
4508 .await?
4509 .iter()
4510 .map(language::proto::serialize_code_action)
4511 .collect(),
4512 version: serialize_version(&version),
4513 })
4514 }
4515
4516 async fn handle_apply_code_action(
4517 this: ModelHandle<Self>,
4518 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4519 _: Arc<Client>,
4520 mut cx: AsyncAppContext,
4521 ) -> Result<proto::ApplyCodeActionResponse> {
4522 let sender_id = envelope.original_sender_id()?;
4523 let action = language::proto::deserialize_code_action(
4524 envelope
4525 .payload
4526 .action
4527 .ok_or_else(|| anyhow!("invalid action"))?,
4528 )?;
4529 let apply_code_action = this.update(&mut cx, |this, cx| {
4530 let buffer = this
4531 .opened_buffers
4532 .get(&envelope.payload.buffer_id)
4533 .and_then(|buffer| buffer.upgrade(cx))
4534 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4535 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4536 })?;
4537
4538 let project_transaction = apply_code_action.await?;
4539 let project_transaction = this.update(&mut cx, |this, cx| {
4540 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4541 });
4542 Ok(proto::ApplyCodeActionResponse {
4543 transaction: Some(project_transaction),
4544 })
4545 }
4546
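    // Generic handler for requests implemented as an `LspCommand`: resolves the target
    // buffer, reconstructs the typed request from its proto form, forwards it to the
    // language server via `request_lsp`, and serializes the response for the caller.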
4547 async fn handle_lsp_command<T: LspCommand>(
4548 this: ModelHandle<Self>,
4549 envelope: TypedEnvelope<T::ProtoRequest>,
4550 _: Arc<Client>,
4551 mut cx: AsyncAppContext,
4552 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4553 where
4554 <T::LspRequest as lsp::request::Request>::Result: Send,
4555 {
4556 let sender_id = envelope.original_sender_id()?;
4557 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4558 let buffer_handle = this.read_with(&cx, |this, _| {
4559 this.opened_buffers
4560 .get(&buffer_id)
4561 .and_then(|buffer| buffer.upgrade(&cx))
4562 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4563 })?;
4564 let request = T::from_proto(
4565 envelope.payload,
4566 this.clone(),
4567 buffer_handle.clone(),
4568 cx.clone(),
4569 )
4570 .await?;
4571 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4572 let response = this
4573 .update(&mut cx, |this, cx| {
4574 this.request_lsp(buffer_handle, request, cx)
4575 })
4576 .await?;
4577 this.update(&mut cx, |this, cx| {
4578 Ok(T::response_to_proto(
4579 response,
4580 this,
4581 sender_id,
4582 &buffer_version,
4583 cx,
4584 ))
4585 })
4586 }
4587
4588 async fn handle_get_project_symbols(
4589 this: ModelHandle<Self>,
4590 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4591 _: Arc<Client>,
4592 mut cx: AsyncAppContext,
4593 ) -> Result<proto::GetProjectSymbolsResponse> {
4594 let symbols = this
4595 .update(&mut cx, |this, cx| {
4596 this.symbols(&envelope.payload.query, cx)
4597 })
4598 .await?;
4599
4600 Ok(proto::GetProjectSymbolsResponse {
4601 symbols: symbols.iter().map(serialize_symbol).collect(),
4602 })
4603 }
4604
4605 async fn handle_search_project(
4606 this: ModelHandle<Self>,
4607 envelope: TypedEnvelope<proto::SearchProject>,
4608 _: Arc<Client>,
4609 mut cx: AsyncAppContext,
4610 ) -> Result<proto::SearchProjectResponse> {
4611 let peer_id = envelope.original_sender_id()?;
4612 let query = SearchQuery::from_proto(envelope.payload)?;
4613 let result = this
4614 .update(&mut cx, |this, cx| this.search(query, cx))
4615 .await?;
4616
4617 this.update(&mut cx, |this, cx| {
4618 let mut locations = Vec::new();
4619 for (buffer, ranges) in result {
4620 for range in ranges {
4621 let start = serialize_anchor(&range.start);
4622 let end = serialize_anchor(&range.end);
4623 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4624 locations.push(proto::Location {
4625 buffer: Some(buffer),
4626 start: Some(start),
4627 end: Some(end),
4628 });
4629 }
4630 }
4631 Ok(proto::SearchProjectResponse { locations })
4632 })
4633 }
4634
4635 async fn handle_open_buffer_for_symbol(
4636 this: ModelHandle<Self>,
4637 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4638 _: Arc<Client>,
4639 mut cx: AsyncAppContext,
4640 ) -> Result<proto::OpenBufferForSymbolResponse> {
4641 let peer_id = envelope.original_sender_id()?;
4642 let symbol = envelope
4643 .payload
4644 .symbol
4645 .ok_or_else(|| anyhow!("invalid symbol"))?;
4646 let symbol = this.read_with(&cx, |this, _| {
4647 let symbol = this.deserialize_symbol(symbol)?;
4648 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4649 if signature == symbol.signature {
4650 Ok(symbol)
4651 } else {
4652 Err(anyhow!("invalid symbol signature"))
4653 }
4654 })?;
4655 let buffer = this
4656 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4657 .await?;
4658
4659 Ok(proto::OpenBufferForSymbolResponse {
4660 buffer: Some(this.update(&mut cx, |this, cx| {
4661 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4662 })),
4663 })
4664 }
4665
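    // Computes a SHA-256 over the worktree id, path, and this project's private nonce.
    // Used when a peer sends a symbol back (see `handle_open_buffer_for_symbol`) to
    // check that it refers to a path this project actually produced.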
4666 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4667 let mut hasher = Sha256::new();
4668 hasher.update(worktree_id.to_proto().to_be_bytes());
4669 hasher.update(path.to_string_lossy().as_bytes());
4670 hasher.update(self.nonce.to_be_bytes());
4671 hasher.finalize().as_slice().try_into().unwrap()
4672 }
4673
4674 async fn handle_open_buffer_by_id(
4675 this: ModelHandle<Self>,
4676 envelope: TypedEnvelope<proto::OpenBufferById>,
4677 _: Arc<Client>,
4678 mut cx: AsyncAppContext,
4679 ) -> Result<proto::OpenBufferResponse> {
4680 let peer_id = envelope.original_sender_id()?;
4681 let buffer = this
4682 .update(&mut cx, |this, cx| {
4683 this.open_buffer_by_id(envelope.payload.id, cx)
4684 })
4685 .await?;
4686 this.update(&mut cx, |this, cx| {
4687 Ok(proto::OpenBufferResponse {
4688 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4689 })
4690 })
4691 }
4692
4693 async fn handle_open_buffer_by_path(
4694 this: ModelHandle<Self>,
4695 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4696 _: Arc<Client>,
4697 mut cx: AsyncAppContext,
4698 ) -> Result<proto::OpenBufferResponse> {
4699 let peer_id = envelope.original_sender_id()?;
4700 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4701 let open_buffer = this.update(&mut cx, |this, cx| {
4702 this.open_buffer(
4703 ProjectPath {
4704 worktree_id,
4705 path: PathBuf::from(envelope.payload.path).into(),
4706 },
4707 cx,
4708 )
4709 });
4710
4711 let buffer = open_buffer.await?;
4712 this.update(&mut cx, |this, cx| {
4713 Ok(proto::OpenBufferResponse {
4714 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4715 })
4716 })
4717 }
4718
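    // Converts a `ProjectTransaction` into its proto form for a specific peer, pairing
    // each buffer (serialized incrementally via `serialize_buffer_for_peer`) with its
    // transaction.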
4719 fn serialize_project_transaction_for_peer(
4720 &mut self,
4721 project_transaction: ProjectTransaction,
4722 peer_id: PeerId,
4723 cx: &AppContext,
4724 ) -> proto::ProjectTransaction {
4725 let mut serialized_transaction = proto::ProjectTransaction {
4726 buffers: Default::default(),
4727 transactions: Default::default(),
4728 };
4729 for (buffer, transaction) in project_transaction.0 {
4730 serialized_transaction
4731 .buffers
4732 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4733 serialized_transaction
4734 .transactions
4735 .push(language::proto::serialize_transaction(&transaction));
4736 }
4737 serialized_transaction
4738 }
4739
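    // Inverse of `serialize_project_transaction_for_peer`: resolves each buffer, waits
    // for the transaction's edits to arrive, and optionally pushes the transactions
    // onto the buffers' undo histories.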
4740 fn deserialize_project_transaction(
4741 &mut self,
4742 message: proto::ProjectTransaction,
4743 push_to_history: bool,
4744 cx: &mut ModelContext<Self>,
4745 ) -> Task<Result<ProjectTransaction>> {
4746 cx.spawn(|this, mut cx| async move {
4747 let mut project_transaction = ProjectTransaction::default();
4748 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4749 let buffer = this
4750 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4751 .await?;
4752 let transaction = language::proto::deserialize_transaction(transaction)?;
4753 project_transaction.0.insert(buffer, transaction);
4754 }
4755
4756 for (buffer, transaction) in &project_transaction.0 {
4757 buffer
4758 .update(&mut cx, |buffer, _| {
4759 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4760 })
4761 .await;
4762
4763 if push_to_history {
4764 buffer.update(&mut cx, |buffer, _| {
4765 buffer.push_transaction(transaction.clone(), Instant::now());
4766 });
4767 }
4768 }
4769
4770 Ok(project_transaction)
4771 })
4772 }
4773
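    // The first time a buffer is sent to a given peer, its full state is serialized;
    // afterwards only its id is sent, on the assumption that the peer already holds a
    // replica.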
4774 fn serialize_buffer_for_peer(
4775 &mut self,
4776 buffer: &ModelHandle<Buffer>,
4777 peer_id: PeerId,
4778 cx: &AppContext,
4779 ) -> proto::Buffer {
4780 let buffer_id = buffer.read(cx).remote_id();
4781 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4782 if shared_buffers.insert(buffer_id) {
4783 proto::Buffer {
4784 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4785 }
4786 } else {
4787 proto::Buffer {
4788 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4789 }
4790 }
4791 }
4792
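    // Resolves a proto buffer into a local model. An `Id` variant waits (via the
    // `opened_buffer` watch channel) until a buffer with that id has been opened
    // locally; a `State` variant constructs a new replica, attaching its `File` if the
    // corresponding worktree is known.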
4793 fn deserialize_buffer(
4794 &mut self,
4795 buffer: proto::Buffer,
4796 cx: &mut ModelContext<Self>,
4797 ) -> Task<Result<ModelHandle<Buffer>>> {
4798 let replica_id = self.replica_id();
4799
4800 let opened_buffer_tx = self.opened_buffer.0.clone();
4801 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4802 cx.spawn(|this, mut cx| async move {
4803 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4804 proto::buffer::Variant::Id(id) => {
4805 let buffer = loop {
4806 let buffer = this.read_with(&cx, |this, cx| {
4807 this.opened_buffers
4808 .get(&id)
4809 .and_then(|buffer| buffer.upgrade(cx))
4810 });
4811 if let Some(buffer) = buffer {
4812 break buffer;
4813 }
4814 opened_buffer_rx
4815 .next()
4816 .await
4817 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4818 };
4819 Ok(buffer)
4820 }
4821 proto::buffer::Variant::State(mut buffer) => {
4822 let mut buffer_worktree = None;
4823 let mut buffer_file = None;
4824 if let Some(file) = buffer.file.take() {
4825 this.read_with(&cx, |this, cx| {
4826 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4827 let worktree =
4828 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4829 anyhow!("no worktree found for id {}", file.worktree_id)
4830 })?;
4831 buffer_file =
4832 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4833 as Box<dyn language::File>);
4834 buffer_worktree = Some(worktree);
4835 Ok::<_, anyhow::Error>(())
4836 })?;
4837 }
4838
4839 let buffer = cx.add_model(|cx| {
4840 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4841 });
4842
4843 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4844
4845 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4846 Ok(buffer)
4847 }
4848 }
4849 })
4850 }
4851
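    // Rebuilds a `Symbol` from its proto form, selecting a language by path so that a
    // richer `CodeLabel` can be produced when possible.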
4852 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4853 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4854 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4855 let start = serialized_symbol
4856 .start
4857 .ok_or_else(|| anyhow!("invalid start"))?;
4858 let end = serialized_symbol
4859 .end
4860 .ok_or_else(|| anyhow!("invalid end"))?;
        // Assumes the peer sent a value that maps to a valid `lsp::SymbolKind`.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4862 let path = PathBuf::from(serialized_symbol.path);
4863 let language = self.languages.select_language(&path);
4864 Ok(Symbol {
4865 source_worktree_id,
4866 worktree_id,
4867 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4868 label: language
4869 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4870 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4871 name: serialized_symbol.name,
4872 path,
4873 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4874 kind,
4875 signature: serialized_symbol
4876 .signature
4877 .try_into()
4878 .map_err(|_| anyhow!("invalid signature"))?,
4879 })
4880 }
4881
4882 async fn handle_buffer_saved(
4883 this: ModelHandle<Self>,
4884 envelope: TypedEnvelope<proto::BufferSaved>,
4885 _: Arc<Client>,
4886 mut cx: AsyncAppContext,
4887 ) -> Result<()> {
4888 let version = deserialize_version(envelope.payload.version);
4889 let mtime = envelope
4890 .payload
4891 .mtime
4892 .ok_or_else(|| anyhow!("missing mtime"))?
4893 .into();
4894
4895 this.update(&mut cx, |this, cx| {
4896 let buffer = this
4897 .opened_buffers
4898 .get(&envelope.payload.buffer_id)
4899 .and_then(|buffer| buffer.upgrade(cx));
4900 if let Some(buffer) = buffer {
4901 buffer.update(cx, |buffer, cx| {
4902 buffer.did_save(version, mtime, None, cx);
4903 });
4904 }
4905 Ok(())
4906 })
4907 }
4908
4909 async fn handle_buffer_reloaded(
4910 this: ModelHandle<Self>,
4911 envelope: TypedEnvelope<proto::BufferReloaded>,
4912 _: Arc<Client>,
4913 mut cx: AsyncAppContext,
4914 ) -> Result<()> {
4915 let payload = envelope.payload.clone();
4916 let version = deserialize_version(payload.version);
4917 let mtime = payload
4918 .mtime
4919 .ok_or_else(|| anyhow!("missing mtime"))?
4920 .into();
4921 this.update(&mut cx, |this, cx| {
4922 let buffer = this
4923 .opened_buffers
4924 .get(&payload.buffer_id)
4925 .and_then(|buffer| buffer.upgrade(cx));
4926 if let Some(buffer) = buffer {
4927 buffer.update(cx, |buffer, cx| {
4928 buffer.did_reload(version, mtime, cx);
4929 });
4930 }
4931 Ok(())
4932 })
4933 }
4934
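    // Fuzzy-matches `query` against the paths of all visible worktrees on the
    // background executor. Root names are included in the match prefix only when more
    // than one worktree is open.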
4935 pub fn match_paths<'a>(
4936 &self,
4937 query: &'a str,
4938 include_ignored: bool,
4939 smart_case: bool,
4940 max_results: usize,
4941 cancel_flag: &'a AtomicBool,
4942 cx: &AppContext,
4943 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4944 let worktrees = self
4945 .worktrees(cx)
4946 .filter(|worktree| worktree.read(cx).is_visible())
4947 .collect::<Vec<_>>();
4948 let include_root_name = worktrees.len() > 1;
4949 let candidate_sets = worktrees
4950 .into_iter()
4951 .map(|worktree| CandidateSet {
4952 snapshot: worktree.read(cx).snapshot(),
4953 include_ignored,
4954 include_root_name,
4955 })
4956 .collect::<Vec<_>>();
4957
4958 let background = cx.background().clone();
4959 async move {
4960 fuzzy::match_paths(
4961 candidate_sets.as_slice(),
4962 query,
4963 smart_case,
4964 max_results,
4965 cancel_flag,
4966 background,
4967 )
4968 .await
4969 }
4970 }
4971
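    // Converts a batch of LSP `TextEdit`s into anchor-based edits against the snapshot
    // the server was operating on (see `buffer_snapshot_for_lsp_version`), coalescing
    // adjacent edits and diffing multiline replacements so that anchors in unchanged
    // regions are preserved.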
4972 fn edits_from_lsp(
4973 &mut self,
4974 buffer: &ModelHandle<Buffer>,
4975 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4976 version: Option<i32>,
4977 cx: &mut ModelContext<Self>,
4978 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4979 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4980 cx.background().spawn(async move {
4981 let snapshot = snapshot?;
4982 let mut lsp_edits = lsp_edits
4983 .into_iter()
4984 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4985 .peekable();
4986
4987 let mut edits = Vec::new();
4988 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4989 // Combine any LSP edits that are adjacent.
4990 //
4991 // Also, combine LSP edits that are separated from each other by only
4992 // a newline. This is important because for some code actions,
4993 // Rust-analyzer rewrites the entire buffer via a series of edits that
4994 // are separated by unchanged newline characters.
4995 //
4996 // In order for the diffing logic below to work properly, any edits that
4997 // cancel each other out must be combined into one.
4998 while let Some((next_range, next_text)) = lsp_edits.peek() {
4999 if next_range.start > range.end {
5000 if next_range.start.row > range.end.row + 1
5001 || next_range.start.column > 0
5002 || snapshot.clip_point_utf16(
5003 PointUtf16::new(range.end.row, u32::MAX),
5004 Bias::Left,
5005 ) > range.end
5006 {
5007 break;
5008 }
5009 new_text.push('\n');
5010 }
5011 range.end = next_range.end;
5012 new_text.push_str(&next_text);
5013 lsp_edits.next();
5014 }
5015
5016 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5017 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5018 {
5019 return Err(anyhow!("invalid edits received from language server"));
5020 }
5021
5022 // For multiline edits, perform a diff of the old and new text so that
5023 // we can identify the changes more precisely, preserving the locations
5024 // of any anchors positioned in the unchanged regions.
5025 if range.end.row > range.start.row {
5026 let mut offset = range.start.to_offset(&snapshot);
5027 let old_text = snapshot.text_for_range(range).collect::<String>();
5028
5029 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5030 let mut moved_since_edit = true;
5031 for change in diff.iter_all_changes() {
5032 let tag = change.tag();
5033 let value = change.value();
5034 match tag {
5035 ChangeTag::Equal => {
5036 offset += value.len();
5037 moved_since_edit = true;
5038 }
5039 ChangeTag::Delete => {
5040 let start = snapshot.anchor_after(offset);
5041 let end = snapshot.anchor_before(offset + value.len());
5042 if moved_since_edit {
5043 edits.push((start..end, String::new()));
5044 } else {
5045 edits.last_mut().unwrap().0.end = end;
5046 }
5047 offset += value.len();
5048 moved_since_edit = false;
5049 }
5050 ChangeTag::Insert => {
5051 if moved_since_edit {
5052 let anchor = snapshot.anchor_after(offset);
5053 edits.push((anchor.clone()..anchor, value.to_string()));
5054 } else {
5055 edits.last_mut().unwrap().1.push_str(value);
5056 }
5057 moved_since_edit = false;
5058 }
5059 }
5060 }
5061 } else if range.end == range.start {
5062 let anchor = snapshot.anchor_after(range.start);
5063 edits.push((anchor.clone()..anchor, new_text));
5064 } else {
5065 let edit_start = snapshot.anchor_after(range.start);
5066 let edit_end = snapshot.anchor_before(range.end);
5067 edits.push((edit_start..edit_end, new_text));
5068 }
5069 }
5070
5071 Ok(edits)
5072 })
5073 }
5074
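    // Returns the snapshot recorded for the given LSP document version, pruning
    // snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old along the way. With no
    // version, the buffer's current text snapshot is returned.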
5075 fn buffer_snapshot_for_lsp_version(
5076 &mut self,
5077 buffer: &ModelHandle<Buffer>,
5078 version: Option<i32>,
5079 cx: &AppContext,
5080 ) -> Result<TextBufferSnapshot> {
5081 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5082
5083 if let Some(version) = version {
5084 let buffer_id = buffer.read(cx).remote_id();
5085 let snapshots = self
5086 .buffer_snapshots
5087 .get_mut(&buffer_id)
5088 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5089 let mut found_snapshot = None;
5090 snapshots.retain(|(snapshot_version, snapshot)| {
5091 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5092 false
5093 } else {
5094 if *snapshot_version == version {
5095 found_snapshot = Some(snapshot.clone());
5096 }
5097 true
5098 }
5099 });
5100
5101 found_snapshot.ok_or_else(|| {
5102 anyhow!(
5103 "snapshot not found for buffer {} at version {}",
5104 buffer_id,
5105 version
5106 )
5107 })
5108 } else {
            Ok(buffer.read(cx).text_snapshot())
5110 }
5111 }
5112
5113 fn language_server_for_buffer(
5114 &self,
5115 buffer: &Buffer,
5116 cx: &AppContext,
5117 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5118 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5119 let worktree_id = file.worktree_id(cx);
5120 self.language_servers
5121 .get(&(worktree_id, language.lsp_adapter()?.name()))
5122 } else {
5123 None
5124 }
5125 }
5126}
5127
5128impl WorktreeHandle {
5129 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5130 match self {
5131 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5132 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5133 }
5134 }
5135}
5136
5137impl OpenBuffer {
5138 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5139 match self {
5140 OpenBuffer::Strong(handle) => Some(handle.clone()),
5141 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5142 OpenBuffer::Loading(_) => None,
5143 }
5144 }
5145}
5146
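// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface,
// optionally including ignored files and the worktree's root name as a prefix.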
5147struct CandidateSet {
5148 snapshot: Snapshot,
5149 include_ignored: bool,
5150 include_root_name: bool,
5151}
5152
5153impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5154 type Candidates = CandidateSetIter<'a>;
5155
5156 fn id(&self) -> usize {
5157 self.snapshot.id().to_usize()
5158 }
5159
5160 fn len(&self) -> usize {
5161 if self.include_ignored {
5162 self.snapshot.file_count()
5163 } else {
5164 self.snapshot.visible_file_count()
5165 }
5166 }
5167
5168 fn prefix(&self) -> Arc<str> {
5169 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5170 self.snapshot.root_name().into()
5171 } else if self.include_root_name {
5172 format!("{}/", self.snapshot.root_name()).into()
5173 } else {
5174 "".into()
5175 }
5176 }
5177
5178 fn candidates(&'a self, start: usize) -> Self::Candidates {
5179 CandidateSetIter {
5180 traversal: self.snapshot.files(self.include_ignored, start),
5181 }
5182 }
5183}
5184
5185struct CandidateSetIter<'a> {
5186 traversal: Traversal<'a>,
5187}
5188
5189impl<'a> Iterator for CandidateSetIter<'a> {
5190 type Item = PathMatchCandidate<'a>;
5191
5192 fn next(&mut self) -> Option<Self::Item> {
5193 self.traversal.next().map(|entry| {
5194 if let EntryKind::File(char_bag) = entry.kind {
5195 PathMatchCandidate {
5196 path: &entry.path,
5197 char_bag,
5198 }
5199 } else {
5200 unreachable!()
5201 }
5202 })
5203 }
5204}
5205
5206impl Entity for Project {
5207 type Event = Event;
5208
5209 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5210 match &self.client_state {
5211 ProjectClientState::Local { remote_id_rx, .. } => {
5212 if let Some(project_id) = *remote_id_rx.borrow() {
5213 self.client
5214 .send(proto::UnregisterProject { project_id })
5215 .log_err();
5216 }
5217 }
5218 ProjectClientState::Remote { remote_id, .. } => {
5219 self.client
5220 .send(proto::LeaveProject {
5221 project_id: *remote_id,
5222 })
5223 .log_err();
5224 }
5225 }
5226 }
5227
5228 fn app_will_quit(
5229 &mut self,
5230 _: &mut MutableAppContext,
5231 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5232 let shutdown_futures = self
5233 .language_servers
5234 .drain()
5235 .filter_map(|(_, (_, server))| server.shutdown())
5236 .collect::<Vec<_>>();
5237 Some(
5238 async move {
5239 futures::future::join_all(shutdown_futures).await;
5240 }
5241 .boxed(),
5242 )
5243 }
5244}
5245
5246impl Collaborator {
5247 fn from_proto(
5248 message: proto::Collaborator,
5249 user_store: &ModelHandle<UserStore>,
5250 cx: &mut AsyncAppContext,
5251 ) -> impl Future<Output = Result<Self>> {
5252 let user = user_store.update(cx, |user_store, cx| {
5253 user_store.fetch_user(message.user_id, cx)
5254 });
5255
5256 async move {
5257 Ok(Self {
5258 peer_id: PeerId(message.peer_id),
5259 user: user.await?,
5260 replica_id: message.replica_id as ReplicaId,
5261 })
5262 }
5263 }
5264}
5265
5266impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5267 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5268 Self {
5269 worktree_id,
5270 path: path.as_ref().into(),
5271 }
5272 }
5273}
5274
5275impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5276 fn from(options: lsp::CreateFileOptions) -> Self {
5277 Self {
5278 overwrite: options.overwrite.unwrap_or(false),
5279 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5280 }
5281 }
5282}
5283
5284impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5285 fn from(options: lsp::RenameFileOptions) -> Self {
5286 Self {
5287 overwrite: options.overwrite.unwrap_or(false),
5288 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5289 }
5290 }
5291}
5292
5293impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5294 fn from(options: lsp::DeleteFileOptions) -> Self {
5295 Self {
5296 recursive: options.recursive.unwrap_or(false),
5297 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5298 }
5299 }
5300}
5301
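// Converts a `Symbol` into its proto representation; the inverse of
// `Project::deserialize_symbol`.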
5302fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5303 proto::Symbol {
5304 source_worktree_id: symbol.source_worktree_id.to_proto(),
5305 worktree_id: symbol.worktree_id.to_proto(),
5306 language_server_name: symbol.language_server_name.0.to_string(),
5307 name: symbol.name.clone(),
5308 kind: unsafe { mem::transmute(symbol.kind) },
5309 path: symbol.path.to_string_lossy().to_string(),
5310 start: Some(proto::Point {
5311 row: symbol.range.start.row,
5312 column: symbol.range.start.column,
5313 }),
5314 end: Some(proto::Point {
5315 row: symbol.range.end.row,
5316 column: symbol.range.end.column,
5317 }),
5318 signature: symbol.signature.to_vec(),
5319 }
5320}
5321
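// Computes `path` relative to `base` without touching the filesystem, e.g. a path of
// "/a/c/d" against a base of "/a/b" yields "../c/d" (illustrative paths only).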
5322fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5323 let mut path_components = path.components();
5324 let mut base_components = base.components();
5325 let mut components: Vec<Component> = Vec::new();
5326 loop {
5327 match (path_components.next(), base_components.next()) {
5328 (None, None) => break,
5329 (Some(a), None) => {
5330 components.push(a);
5331 components.extend(path_components.by_ref());
5332 break;
5333 }
5334 (None, _) => components.push(Component::ParentDir),
5335 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5336 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5337 (Some(a), Some(_)) => {
5338 components.push(Component::ParentDir);
5339 for _ in base_components {
5340 components.push(Component::ParentDir);
5341 }
5342 components.push(a);
5343 components.extend(path_components.by_ref());
5344 break;
5345 }
5346 }
5347 }
5348 components.iter().map(|c| c.as_os_str()).collect()
5349}
5350
5351impl Item for Buffer {
5352 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5353 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5354 }
5355}
5356
5357#[cfg(test)]
5358mod tests {
5359 use crate::worktree::WorktreeHandle;
5360
5361 use super::{Event, *};
5362 use fs::RealFs;
5363 use futures::{future, StreamExt};
5364 use gpui::test::subscribe;
5365 use language::{
5366 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5367 OffsetRangeExt, Point, ToPoint,
5368 };
5369 use lsp::Url;
5370 use serde_json::json;
5371 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5372 use unindent::Unindent as _;
5373 use util::{assert_set_eq, test::temp_tree};
5374
5375 #[gpui::test]
5376 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5377 let dir = temp_tree(json!({
5378 "root": {
5379 "apple": "",
5380 "banana": {
5381 "carrot": {
5382 "date": "",
5383 "endive": "",
5384 }
5385 },
5386 "fennel": {
5387 "grape": "",
5388 }
5389 }
5390 }));
5391
5392 let root_link_path = dir.path().join("root_link");
5393 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5394 unix::fs::symlink(
5395 &dir.path().join("root/fennel"),
5396 &dir.path().join("root/finnochio"),
5397 )
5398 .unwrap();
5399
5400 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5401
5402 project.read_with(cx, |project, cx| {
5403 let tree = project.worktrees(cx).next().unwrap().read(cx);
5404 assert_eq!(tree.file_count(), 5);
5405 assert_eq!(
5406 tree.inode_for_path("fennel/grape"),
5407 tree.inode_for_path("finnochio/grape")
5408 );
5409 });
5410
5411 let cancel_flag = Default::default();
5412 let results = project
5413 .read_with(cx, |project, cx| {
5414 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5415 })
5416 .await;
5417 assert_eq!(
5418 results
5419 .into_iter()
5420 .map(|result| result.path)
5421 .collect::<Vec<Arc<Path>>>(),
5422 vec![
5423 PathBuf::from("banana/carrot/date").into(),
5424 PathBuf::from("banana/carrot/endive").into(),
5425 ]
5426 );
5427 }
5428
5429 #[gpui::test]
5430 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5431 cx.foreground().forbid_parking();
5432
5433 let mut rust_language = Language::new(
5434 LanguageConfig {
5435 name: "Rust".into(),
5436 path_suffixes: vec!["rs".to_string()],
5437 ..Default::default()
5438 },
5439 Some(tree_sitter_rust::language()),
5440 );
5441 let mut json_language = Language::new(
5442 LanguageConfig {
5443 name: "JSON".into(),
5444 path_suffixes: vec!["json".to_string()],
5445 ..Default::default()
5446 },
5447 None,
5448 );
5449 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5450 name: "the-rust-language-server",
5451 capabilities: lsp::ServerCapabilities {
5452 completion_provider: Some(lsp::CompletionOptions {
5453 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5454 ..Default::default()
5455 }),
5456 ..Default::default()
5457 },
5458 ..Default::default()
5459 });
5460 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5461 name: "the-json-language-server",
5462 capabilities: lsp::ServerCapabilities {
5463 completion_provider: Some(lsp::CompletionOptions {
5464 trigger_characters: Some(vec![":".to_string()]),
5465 ..Default::default()
5466 }),
5467 ..Default::default()
5468 },
5469 ..Default::default()
5470 });
5471
5472 let fs = FakeFs::new(cx.background());
5473 fs.insert_tree(
5474 "/the-root",
5475 json!({
5476 "test.rs": "const A: i32 = 1;",
5477 "test2.rs": "",
5478 "Cargo.toml": "a = 1",
5479 "package.json": "{\"a\": 1}",
5480 }),
5481 )
5482 .await;
5483
5484 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5485 project.update(cx, |project, _| {
5486 project.languages.add(Arc::new(rust_language));
5487 project.languages.add(Arc::new(json_language));
5488 });
5489
5490 // Open a buffer without an associated language server.
5491 let toml_buffer = project
5492 .update(cx, |project, cx| {
5493 project.open_local_buffer("/the-root/Cargo.toml", cx)
5494 })
5495 .await
5496 .unwrap();
5497
5498 // Open a buffer with an associated language server.
5499 let rust_buffer = project
5500 .update(cx, |project, cx| {
5501 project.open_local_buffer("/the-root/test.rs", cx)
5502 })
5503 .await
5504 .unwrap();
5505
5506 // A server is started up, and it is notified about Rust files.
5507 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5508 assert_eq!(
5509 fake_rust_server
5510 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5511 .await
5512 .text_document,
5513 lsp::TextDocumentItem {
5514 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5515 version: 0,
5516 text: "const A: i32 = 1;".to_string(),
5517 language_id: Default::default()
5518 }
5519 );
5520
5521 // The buffer is configured based on the language server's capabilities.
5522 rust_buffer.read_with(cx, |buffer, _| {
5523 assert_eq!(
5524 buffer.completion_triggers(),
5525 &[".".to_string(), "::".to_string()]
5526 );
5527 });
5528 toml_buffer.read_with(cx, |buffer, _| {
5529 assert!(buffer.completion_triggers().is_empty());
5530 });
5531
5532 // Edit a buffer. The changes are reported to the language server.
5533 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5534 assert_eq!(
5535 fake_rust_server
5536 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5537 .await
5538 .text_document,
5539 lsp::VersionedTextDocumentIdentifier::new(
5540 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5541 1
5542 )
5543 );
5544
5545 // Open a third buffer with a different associated language server.
5546 let json_buffer = project
5547 .update(cx, |project, cx| {
5548 project.open_local_buffer("/the-root/package.json", cx)
5549 })
5550 .await
5551 .unwrap();
5552
        // A JSON language server is started up and is notified only about the JSON buffer.
5554 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5555 assert_eq!(
5556 fake_json_server
5557 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5558 .await
5559 .text_document,
5560 lsp::TextDocumentItem {
5561 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5562 version: 0,
5563 text: "{\"a\": 1}".to_string(),
5564 language_id: Default::default()
5565 }
5566 );
5567
5568 // This buffer is configured based on the second language server's
5569 // capabilities.
5570 json_buffer.read_with(cx, |buffer, _| {
5571 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5572 });
5573
5574 // When opening another buffer whose language server is already running,
5575 // it is also configured based on the existing language server's capabilities.
5576 let rust_buffer2 = project
5577 .update(cx, |project, cx| {
5578 project.open_local_buffer("/the-root/test2.rs", cx)
5579 })
5580 .await
5581 .unwrap();
5582 rust_buffer2.read_with(cx, |buffer, _| {
5583 assert_eq!(
5584 buffer.completion_triggers(),
5585 &[".".to_string(), "::".to_string()]
5586 );
5587 });
5588
5589 // Changes are reported only to servers matching the buffer's language.
5590 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5591 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5592 assert_eq!(
5593 fake_rust_server
5594 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5595 .await
5596 .text_document,
5597 lsp::VersionedTextDocumentIdentifier::new(
5598 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5599 1
5600 )
5601 );
5602
5603 // Save notifications are reported to all servers.
5604 toml_buffer
5605 .update(cx, |buffer, cx| buffer.save(cx))
5606 .await
5607 .unwrap();
5608 assert_eq!(
5609 fake_rust_server
5610 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5611 .await
5612 .text_document,
5613 lsp::TextDocumentIdentifier::new(
5614 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5615 )
5616 );
5617 assert_eq!(
5618 fake_json_server
5619 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5620 .await
5621 .text_document,
5622 lsp::TextDocumentIdentifier::new(
5623 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5624 )
5625 );
5626
5627 // Renames are reported only to servers matching the buffer's language.
5628 fs.rename(
5629 Path::new("/the-root/test2.rs"),
5630 Path::new("/the-root/test3.rs"),
5631 Default::default(),
5632 )
5633 .await
5634 .unwrap();
5635 assert_eq!(
5636 fake_rust_server
5637 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5638 .await
5639 .text_document,
5640 lsp::TextDocumentIdentifier::new(
5641 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5642 ),
5643 );
5644 assert_eq!(
5645 fake_rust_server
5646 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5647 .await
5648 .text_document,
5649 lsp::TextDocumentItem {
5650 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5651 version: 0,
5652 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5653 language_id: Default::default()
5654 },
5655 );
5656
5657 rust_buffer2.update(cx, |buffer, cx| {
5658 buffer.update_diagnostics(
5659 DiagnosticSet::from_sorted_entries(
5660 vec![DiagnosticEntry {
5661 diagnostic: Default::default(),
5662 range: Anchor::MIN..Anchor::MAX,
5663 }],
5664 &buffer.snapshot(),
5665 ),
5666 cx,
5667 );
5668 assert_eq!(
5669 buffer
5670 .snapshot()
5671 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5672 .count(),
5673 1
5674 );
5675 });
5676
5677 // When the rename changes the extension of the file, the buffer gets closed on the old
5678 // language server and gets opened on the new one.
5679 fs.rename(
5680 Path::new("/the-root/test3.rs"),
5681 Path::new("/the-root/test3.json"),
5682 Default::default(),
5683 )
5684 .await
5685 .unwrap();
5686 assert_eq!(
5687 fake_rust_server
5688 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5689 .await
5690 .text_document,
5691 lsp::TextDocumentIdentifier::new(
5692 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5693 ),
5694 );
5695 assert_eq!(
5696 fake_json_server
5697 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5698 .await
5699 .text_document,
5700 lsp::TextDocumentItem {
5701 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5702 version: 0,
5703 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5704 language_id: Default::default()
5705 },
5706 );
5707
        // The diagnostics are cleared, since the buffer's language has changed.
5709 rust_buffer2.read_with(cx, |buffer, _| {
5710 assert_eq!(
5711 buffer
5712 .snapshot()
5713 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5714 .count(),
5715 0
5716 );
5717 });
5718
5719 // The renamed file's version resets after changing language server.
5720 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5721 assert_eq!(
5722 fake_json_server
5723 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5724 .await
5725 .text_document,
5726 lsp::VersionedTextDocumentIdentifier::new(
5727 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5728 1
5729 )
5730 );
5731
5732 // Restart language servers
5733 project.update(cx, |project, cx| {
5734 project.restart_language_servers_for_buffers(
5735 vec![rust_buffer.clone(), json_buffer.clone()],
5736 cx,
5737 );
5738 });
5739
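        // When restarting, each running language server should receive a shutdown
        // request before its replacement is started.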
5740 let mut rust_shutdown_requests = fake_rust_server
5741 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5742 let mut json_shutdown_requests = fake_json_server
5743 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5744 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5745
5746 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5747 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5748
        // Ensure the rust document is reopened in the new rust language server
5750 assert_eq!(
5751 fake_rust_server
5752 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5753 .await
5754 .text_document,
5755 lsp::TextDocumentItem {
5756 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5757 version: 1,
5758 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5759 language_id: Default::default()
5760 }
5761 );
5762
        // Ensure the json documents are reopened in the new json language server
5764 assert_set_eq!(
5765 [
5766 fake_json_server
5767 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5768 .await
5769 .text_document,
5770 fake_json_server
5771 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5772 .await
5773 .text_document,
5774 ],
5775 [
5776 lsp::TextDocumentItem {
5777 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5778 version: 0,
5779 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5780 language_id: Default::default()
5781 },
5782 lsp::TextDocumentItem {
5783 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5784 version: 1,
5785 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5786 language_id: Default::default()
5787 }
5788 ]
5789 );
5790
5791 // Close notifications are reported only to servers matching the buffer's language.
5792 cx.update(|_| drop(json_buffer));
5793 let close_message = lsp::DidCloseTextDocumentParams {
5794 text_document: lsp::TextDocumentIdentifier::new(
5795 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5796 ),
5797 };
5798 assert_eq!(
5799 fake_json_server
5800 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5801 .await,
5802 close_message,
5803 );
5804 }
5805
5806 #[gpui::test]
5807 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5808 cx.foreground().forbid_parking();
5809
5810 let fs = FakeFs::new(cx.background());
5811 fs.insert_tree(
5812 "/dir",
5813 json!({
5814 "a.rs": "let a = 1;",
5815 "b.rs": "let b = 2;"
5816 }),
5817 )
5818 .await;
5819
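        // Opening each file as its own project root creates two single-file worktrees.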
5820 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5821
5822 let buffer_a = project
5823 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5824 .await
5825 .unwrap();
5826 let buffer_b = project
5827 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5828 .await
5829 .unwrap();
5830
5831 project.update(cx, |project, cx| {
5832 project
5833 .update_diagnostics(
5834 lsp::PublishDiagnosticsParams {
5835 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5836 version: None,
5837 diagnostics: vec![lsp::Diagnostic {
5838 range: lsp::Range::new(
5839 lsp::Position::new(0, 4),
5840 lsp::Position::new(0, 5),
5841 ),
5842 severity: Some(lsp::DiagnosticSeverity::ERROR),
5843 message: "error 1".to_string(),
5844 ..Default::default()
5845 }],
5846 },
5847 &[],
5848 cx,
5849 )
5850 .unwrap();
5851 project
5852 .update_diagnostics(
5853 lsp::PublishDiagnosticsParams {
5854 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5855 version: None,
5856 diagnostics: vec![lsp::Diagnostic {
5857 range: lsp::Range::new(
5858 lsp::Position::new(0, 4),
5859 lsp::Position::new(0, 5),
5860 ),
5861 severity: Some(lsp::DiagnosticSeverity::WARNING),
5862 message: "error 2".to_string(),
5863 ..Default::default()
5864 }],
5865 },
5866 &[],
5867 cx,
5868 )
5869 .unwrap();
5870 });
5871
5872 buffer_a.read_with(cx, |buffer, _| {
5873 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5874 assert_eq!(
5875 chunks
5876 .iter()
5877 .map(|(s, d)| (s.as_str(), *d))
5878 .collect::<Vec<_>>(),
5879 &[
5880 ("let ", None),
5881 ("a", Some(DiagnosticSeverity::ERROR)),
5882 (" = 1;", None),
5883 ]
5884 );
5885 });
5886 buffer_b.read_with(cx, |buffer, _| {
5887 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5888 assert_eq!(
5889 chunks
5890 .iter()
5891 .map(|(s, d)| (s.as_str(), *d))
5892 .collect::<Vec<_>>(),
5893 &[
5894 ("let ", None),
5895 ("b", Some(DiagnosticSeverity::WARNING)),
5896 (" = 2;", None),
5897 ]
5898 );
5899 });
5900 }
5901
5902 #[gpui::test]
5903 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5904 cx.foreground().forbid_parking();
5905
5906 let progress_token = "the-progress-token";
5907 let mut language = Language::new(
5908 LanguageConfig {
5909 name: "Rust".into(),
5910 path_suffixes: vec!["rs".to_string()],
5911 ..Default::default()
5912 },
5913 Some(tree_sitter_rust::language()),
5914 );
5915 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5916 disk_based_diagnostics_progress_token: Some(progress_token),
5917 disk_based_diagnostics_sources: &["disk"],
5918 ..Default::default()
5919 });
5920
5921 let fs = FakeFs::new(cx.background());
5922 fs.insert_tree(
5923 "/dir",
5924 json!({
5925 "a.rs": "fn a() { A }",
5926 "b.rs": "const y: i32 = 1",
5927 }),
5928 )
5929 .await;
5930
5931 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5932 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5933 let worktree_id =
5934 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5935
        // Cause the worktree to start the fake language server
5937 let _buffer = project
5938 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5939 .await
5940 .unwrap();
5941
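        // Subscribe to project events so the disk-based diagnostics lifecycle can be
        // asserted on below.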
5942 let mut events = subscribe(&project, cx);
5943
5944 let mut fake_server = fake_servers.next().await.unwrap();
5945 fake_server.start_progress(progress_token).await;
5946 assert_eq!(
5947 events.next().await.unwrap(),
5948 Event::DiskBasedDiagnosticsStarted
5949 );
5950
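        // Begin additional progress work under the same token. The diagnostics are not
        // reported as finished until every outstanding progress has ended.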
5951 fake_server.start_progress(progress_token).await;
5952 fake_server.end_progress(progress_token).await;
5953 fake_server.start_progress(progress_token).await;
5954
5955 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5956 lsp::PublishDiagnosticsParams {
5957 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5958 version: None,
5959 diagnostics: vec![lsp::Diagnostic {
5960 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5961 severity: Some(lsp::DiagnosticSeverity::ERROR),
5962 message: "undefined variable 'A'".to_string(),
5963 ..Default::default()
5964 }],
5965 },
5966 );
5967 assert_eq!(
5968 events.next().await.unwrap(),
5969 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5970 );
5971
5972 fake_server.end_progress(progress_token).await;
5973 fake_server.end_progress(progress_token).await;
5974 assert_eq!(
5975 events.next().await.unwrap(),
5976 Event::DiskBasedDiagnosticsUpdated
5977 );
5978 assert_eq!(
5979 events.next().await.unwrap(),
5980 Event::DiskBasedDiagnosticsFinished
5981 );
5982
5983 let buffer = project
5984 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5985 .await
5986 .unwrap();
5987
5988 buffer.read_with(cx, |buffer, _| {
5989 let snapshot = buffer.snapshot();
5990 let diagnostics = snapshot
5991 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5992 .collect::<Vec<_>>();
5993 assert_eq!(
5994 diagnostics,
5995 &[DiagnosticEntry {
5996 range: Point::new(0, 9)..Point::new(0, 10),
5997 diagnostic: Diagnostic {
5998 severity: lsp::DiagnosticSeverity::ERROR,
5999 message: "undefined variable 'A'".to_string(),
6000 group_id: 0,
6001 is_primary: true,
6002 ..Default::default()
6003 }
6004 }]
6005 )
6006 });
6007
6008 // Ensure publishing empty diagnostics twice only results in one update event.
6009 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6010 lsp::PublishDiagnosticsParams {
6011 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6012 version: None,
6013 diagnostics: Default::default(),
6014 },
6015 );
6016 assert_eq!(
6017 events.next().await.unwrap(),
6018 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6019 );
6020
6021 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6022 lsp::PublishDiagnosticsParams {
6023 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6024 version: None,
6025 diagnostics: Default::default(),
6026 },
6027 );
6028 cx.foreground().run_until_parked();
6029 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6030 }
6031
6032 #[gpui::test]
6033 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6034 cx.foreground().forbid_parking();
6035
6036 let progress_token = "the-progress-token";
6037 let mut language = Language::new(
6038 LanguageConfig {
6039 path_suffixes: vec!["rs".to_string()],
6040 ..Default::default()
6041 },
6042 None,
6043 );
6044 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6045 disk_based_diagnostics_sources: &["disk"],
6046 disk_based_diagnostics_progress_token: Some(progress_token),
6047 ..Default::default()
6048 });
6049
6050 let fs = FakeFs::new(cx.background());
6051 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6052
6053 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6054 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6055
6056 let buffer = project
6057 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6058 .await
6059 .unwrap();
6060
6061 // Simulate diagnostics starting to update.
6062 let mut fake_server = fake_servers.next().await.unwrap();
6063 fake_server.start_progress(progress_token).await;
6064
6065 // Restart the server before the diagnostics finish updating.
6066 project.update(cx, |project, cx| {
6067 project.restart_language_servers_for_buffers([buffer], cx);
6068 });
6069 let mut events = subscribe(&project, cx);
6070
6071 // Simulate the newly started server sending more diagnostics.
6072 let mut fake_server = fake_servers.next().await.unwrap();
6073 fake_server.start_progress(progress_token).await;
6074 assert_eq!(
6075 events.next().await.unwrap(),
6076 Event::DiskBasedDiagnosticsStarted
6077 );
6078
6079 // All diagnostics are considered done, despite the old server's diagnostic
6080 // task never completing.
6081 fake_server.end_progress(progress_token).await;
6082 assert_eq!(
6083 events.next().await.unwrap(),
6084 Event::DiskBasedDiagnosticsUpdated
6085 );
6086 assert_eq!(
6087 events.next().await.unwrap(),
6088 Event::DiskBasedDiagnosticsFinished
6089 );
6090 project.read_with(cx, |project, _| {
6091 assert!(!project.is_running_disk_based_diagnostics());
6092 });
6093 }
6094
6095 #[gpui::test]
6096 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6097 cx.foreground().forbid_parking();
6098
6099 let mut language = Language::new(
6100 LanguageConfig {
6101 name: "Rust".into(),
6102 path_suffixes: vec!["rs".to_string()],
6103 ..Default::default()
6104 },
6105 Some(tree_sitter_rust::language()),
6106 );
6107 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6108 disk_based_diagnostics_sources: &["disk"],
6109 ..Default::default()
6110 });
6111
6112 let text = "
6113 fn a() { A }
6114 fn b() { BB }
6115 fn c() { CCC }
6116 "
6117 .unindent();
6118
6119 let fs = FakeFs::new(cx.background());
6120 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6121
6122 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6123 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6124
6125 let buffer = project
6126 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6127 .await
6128 .unwrap();
6129
6130 let mut fake_server = fake_servers.next().await.unwrap();
6131 let open_notification = fake_server
6132 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6133 .await;
6134
6135 // Edit the buffer, moving the content down
6136 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6137 let change_notification_1 = fake_server
6138 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6139 .await;
6140 assert!(
6141 change_notification_1.text_document.version > open_notification.text_document.version
6142 );
6143
6144 // Report some diagnostics for the initial version of the buffer
6145 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6146 lsp::PublishDiagnosticsParams {
6147 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6148 version: Some(open_notification.text_document.version),
6149 diagnostics: vec![
6150 lsp::Diagnostic {
6151 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6152 severity: Some(DiagnosticSeverity::ERROR),
6153 message: "undefined variable 'A'".to_string(),
6154 source: Some("disk".to_string()),
6155 ..Default::default()
6156 },
6157 lsp::Diagnostic {
6158 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6159 severity: Some(DiagnosticSeverity::ERROR),
6160 message: "undefined variable 'BB'".to_string(),
6161 source: Some("disk".to_string()),
6162 ..Default::default()
6163 },
6164 lsp::Diagnostic {
6165 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6166 severity: Some(DiagnosticSeverity::ERROR),
6167 source: Some("disk".to_string()),
6168 message: "undefined variable 'CCC'".to_string(),
6169 ..Default::default()
6170 },
6171 ],
6172 },
6173 );
6174
6175 // The diagnostics have moved down since they were created.
6176 buffer.next_notification(cx).await;
6177 buffer.read_with(cx, |buffer, _| {
6178 assert_eq!(
6179 buffer
6180 .snapshot()
6181 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6182 .collect::<Vec<_>>(),
6183 &[
6184 DiagnosticEntry {
6185 range: Point::new(3, 9)..Point::new(3, 11),
6186 diagnostic: Diagnostic {
6187 severity: DiagnosticSeverity::ERROR,
6188 message: "undefined variable 'BB'".to_string(),
6189 is_disk_based: true,
6190 group_id: 1,
6191 is_primary: true,
6192 ..Default::default()
6193 },
6194 },
6195 DiagnosticEntry {
6196 range: Point::new(4, 9)..Point::new(4, 12),
6197 diagnostic: Diagnostic {
6198 severity: DiagnosticSeverity::ERROR,
6199 message: "undefined variable 'CCC'".to_string(),
6200 is_disk_based: true,
6201 group_id: 2,
6202 is_primary: true,
6203 ..Default::default()
6204 }
6205 }
6206 ]
6207 );
6208 assert_eq!(
6209 chunks_with_diagnostics(buffer, 0..buffer.len()),
6210 [
6211 ("\n\nfn a() { ".to_string(), None),
6212 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6213 (" }\nfn b() { ".to_string(), None),
6214 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6215 (" }\nfn c() { ".to_string(), None),
6216 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6217 (" }\n".to_string(), None),
6218 ]
6219 );
6220 assert_eq!(
6221 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6222 [
6223 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6224 (" }\nfn c() { ".to_string(), None),
6225 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6226 ]
6227 );
6228 });
6229
6230 // Ensure overlapping diagnostics are highlighted correctly.
6231 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6232 lsp::PublishDiagnosticsParams {
6233 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6234 version: Some(open_notification.text_document.version),
6235 diagnostics: vec![
6236 lsp::Diagnostic {
6237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6238 severity: Some(DiagnosticSeverity::ERROR),
6239 message: "undefined variable 'A'".to_string(),
6240 source: Some("disk".to_string()),
6241 ..Default::default()
6242 },
6243 lsp::Diagnostic {
6244 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6245 severity: Some(DiagnosticSeverity::WARNING),
6246 message: "unreachable statement".to_string(),
6247 source: Some("disk".to_string()),
6248 ..Default::default()
6249 },
6250 ],
6251 },
6252 );
6253
6254 buffer.next_notification(cx).await;
6255 buffer.read_with(cx, |buffer, _| {
6256 assert_eq!(
6257 buffer
6258 .snapshot()
6259 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6260 .collect::<Vec<_>>(),
6261 &[
6262 DiagnosticEntry {
6263 range: Point::new(2, 9)..Point::new(2, 12),
6264 diagnostic: Diagnostic {
6265 severity: DiagnosticSeverity::WARNING,
6266 message: "unreachable statement".to_string(),
6267 is_disk_based: true,
6268 group_id: 1,
6269 is_primary: true,
6270 ..Default::default()
6271 }
6272 },
6273 DiagnosticEntry {
6274 range: Point::new(2, 9)..Point::new(2, 10),
6275 diagnostic: Diagnostic {
6276 severity: DiagnosticSeverity::ERROR,
6277 message: "undefined variable 'A'".to_string(),
6278 is_disk_based: true,
6279 group_id: 0,
6280 is_primary: true,
6281 ..Default::default()
6282 },
6283 }
6284 ]
6285 );
6286 assert_eq!(
6287 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6288 [
6289 ("fn a() { ".to_string(), None),
6290 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6291 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6292 ("\n".to_string(), None),
6293 ]
6294 );
6295 assert_eq!(
6296 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6297 [
6298 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6299 ("\n".to_string(), None),
6300 ]
6301 );
6302 });
6303
6304 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6305 // changes since the last save.
6306 buffer.update(cx, |buffer, cx| {
6307 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6308 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6309 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6310 });
6311 let change_notification_2 = fake_server
6312 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6313 .await;
6314 assert!(
6315 change_notification_2.text_document.version
6316 > change_notification_1.text_document.version
6317 );
6318
6319 // Handle out-of-order diagnostics
6320 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6321 lsp::PublishDiagnosticsParams {
6322 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6323 version: Some(change_notification_2.text_document.version),
6324 diagnostics: vec![
6325 lsp::Diagnostic {
6326 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6327 severity: Some(DiagnosticSeverity::ERROR),
6328 message: "undefined variable 'BB'".to_string(),
6329 source: Some("disk".to_string()),
6330 ..Default::default()
6331 },
6332 lsp::Diagnostic {
6333 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6334 severity: Some(DiagnosticSeverity::WARNING),
6335 message: "undefined variable 'A'".to_string(),
6336 source: Some("disk".to_string()),
6337 ..Default::default()
6338 },
6339 ],
6340 },
6341 );
6342
6343 buffer.next_notification(cx).await;
6344 buffer.read_with(cx, |buffer, _| {
6345 assert_eq!(
6346 buffer
6347 .snapshot()
6348 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6349 .collect::<Vec<_>>(),
6350 &[
6351 DiagnosticEntry {
6352 range: Point::new(2, 21)..Point::new(2, 22),
6353 diagnostic: Diagnostic {
6354 severity: DiagnosticSeverity::WARNING,
6355 message: "undefined variable 'A'".to_string(),
6356 is_disk_based: true,
6357 group_id: 1,
6358 is_primary: true,
6359 ..Default::default()
6360 }
6361 },
6362 DiagnosticEntry {
6363 range: Point::new(3, 9)..Point::new(3, 14),
6364 diagnostic: Diagnostic {
6365 severity: DiagnosticSeverity::ERROR,
6366 message: "undefined variable 'BB'".to_string(),
6367 is_disk_based: true,
6368 group_id: 0,
6369 is_primary: true,
6370 ..Default::default()
6371 },
6372 }
6373 ]
6374 );
6375 });
6376 }
6377
6378 #[gpui::test]
6379 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6380 cx.foreground().forbid_parking();
6381
6382 let text = concat!(
6383 "let one = ;\n", //
6384 "let two = \n",
6385 "let three = 3;\n",
6386 );
6387
6388 let fs = FakeFs::new(cx.background());
6389 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6390
6391 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6392 let buffer = project
6393 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6394 .await
6395 .unwrap();
6396
6397 project.update(cx, |project, cx| {
6398 project
6399 .update_buffer_diagnostics(
6400 &buffer,
6401 vec![
6402 DiagnosticEntry {
6403 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6404 diagnostic: Diagnostic {
6405 severity: DiagnosticSeverity::ERROR,
6406 message: "syntax error 1".to_string(),
6407 ..Default::default()
6408 },
6409 },
6410 DiagnosticEntry {
6411 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6412 diagnostic: Diagnostic {
6413 severity: DiagnosticSeverity::ERROR,
6414 message: "syntax error 2".to_string(),
6415 ..Default::default()
6416 },
6417 },
6418 ],
6419 None,
6420 cx,
6421 )
6422 .unwrap();
6423 });
6424
6425 // An empty range is extended forward to include the following character.
6426 // At the end of a line, an empty range is extended backward to include
6427 // the preceding character.
6428 buffer.read_with(cx, |buffer, _| {
6429 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6430 assert_eq!(
6431 chunks
6432 .iter()
6433 .map(|(s, d)| (s.as_str(), *d))
6434 .collect::<Vec<_>>(),
6435 &[
6436 ("let one = ", None),
6437 (";", Some(DiagnosticSeverity::ERROR)),
6438 ("\nlet two =", None),
6439 (" ", Some(DiagnosticSeverity::ERROR)),
6440 ("\nlet three = 3;\n", None)
6441 ]
6442 );
6443 });
6444 }
6445
6446 #[gpui::test]
6447 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6448 cx.foreground().forbid_parking();
6449
6450 let mut language = Language::new(
6451 LanguageConfig {
6452 name: "Rust".into(),
6453 path_suffixes: vec!["rs".to_string()],
6454 ..Default::default()
6455 },
6456 Some(tree_sitter_rust::language()),
6457 );
6458 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6459
6460 let text = "
6461 fn a() {
6462 f1();
6463 }
6464 fn b() {
6465 f2();
6466 }
6467 fn c() {
6468 f3();
6469 }
6470 "
6471 .unindent();
6472
6473 let fs = FakeFs::new(cx.background());
6474 fs.insert_tree(
6475 "/dir",
6476 json!({
6477 "a.rs": text.clone(),
6478 }),
6479 )
6480 .await;
6481
6482 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6483 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6484 let buffer = project
6485 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6486 .await
6487 .unwrap();
6488
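        // Capture the document version from the initial open notification. The edits
        // below will be computed against this soon-to-be-outdated version.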
6489 let mut fake_server = fake_servers.next().await.unwrap();
6490 let lsp_document_version = fake_server
6491 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6492 .await
6493 .text_document
6494 .version;
6495
6496 // Simulate editing the buffer after the language server computes some edits.
6497 buffer.update(cx, |buffer, cx| {
6498 buffer.edit(
6499 [(
6500 Point::new(0, 0)..Point::new(0, 0),
6501 "// above first function\n",
6502 )],
6503 cx,
6504 );
6505 buffer.edit(
6506 [(
6507 Point::new(2, 0)..Point::new(2, 0),
6508 " // inside first function\n",
6509 )],
6510 cx,
6511 );
6512 buffer.edit(
6513 [(
6514 Point::new(6, 4)..Point::new(6, 4),
6515 "// inside second function ",
6516 )],
6517 cx,
6518 );
6519
6520 assert_eq!(
6521 buffer.text(),
6522 "
6523 // above first function
6524 fn a() {
6525 // inside first function
6526 f1();
6527 }
6528 fn b() {
6529 // inside second function f2();
6530 }
6531 fn c() {
6532 f3();
6533 }
6534 "
6535 .unindent()
6536 );
6537 });
6538
6539 let edits = project
6540 .update(cx, |project, cx| {
6541 project.edits_from_lsp(
6542 &buffer,
6543 vec![
6544 // replace body of first function
6545 lsp::TextEdit {
6546 range: lsp::Range::new(
6547 lsp::Position::new(0, 0),
6548 lsp::Position::new(3, 0),
6549 ),
6550 new_text: "
6551 fn a() {
6552 f10();
6553 }
6554 "
6555 .unindent(),
6556 },
6557 // edit inside second function
6558 lsp::TextEdit {
6559 range: lsp::Range::new(
6560 lsp::Position::new(4, 6),
6561 lsp::Position::new(4, 6),
6562 ),
6563 new_text: "00".into(),
6564 },
6565 // edit inside third function via two distinct edits
6566 lsp::TextEdit {
6567 range: lsp::Range::new(
6568 lsp::Position::new(7, 5),
6569 lsp::Position::new(7, 5),
6570 ),
6571 new_text: "4000".into(),
6572 },
6573 lsp::TextEdit {
6574 range: lsp::Range::new(
6575 lsp::Position::new(7, 5),
6576 lsp::Position::new(7, 6),
6577 ),
6578 new_text: "".into(),
6579 },
6580 ],
6581 Some(lsp_document_version),
6582 cx,
6583 )
6584 })
6585 .await
6586 .unwrap();
6587
6588 buffer.update(cx, |buffer, cx| {
6589 for (range, new_text) in edits {
6590 buffer.edit([(range, new_text)], cx);
6591 }
6592 assert_eq!(
6593 buffer.text(),
6594 "
6595 // above first function
6596 fn a() {
6597 // inside first function
6598 f10();
6599 }
6600 fn b() {
6601 // inside second function f200();
6602 }
6603 fn c() {
6604 f4000();
6605 }
6606 "
6607 .unindent()
6608 );
6609 });
6610 }
6611
6612 #[gpui::test]
6613 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6614 cx.foreground().forbid_parking();
6615
6616 let text = "
6617 use a::b;
6618 use a::c;
6619
6620 fn f() {
6621 b();
6622 c();
6623 }
6624 "
6625 .unindent();
6626
6627 let fs = FakeFs::new(cx.background());
6628 fs.insert_tree(
6629 "/dir",
6630 json!({
6631 "a.rs": text.clone(),
6632 }),
6633 )
6634 .await;
6635
6636 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6637 let buffer = project
6638 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6639 .await
6640 .unwrap();
6641
6642 // Simulate the language server sending us a small edit in the form of a very large diff.
6643 // Rust-analyzer does this when performing a merge-imports code action.
6644 let edits = project
6645 .update(cx, |project, cx| {
6646 project.edits_from_lsp(
6647 &buffer,
6648 [
6649 // Replace the first use statement without editing the semicolon.
6650 lsp::TextEdit {
6651 range: lsp::Range::new(
6652 lsp::Position::new(0, 4),
6653 lsp::Position::new(0, 8),
6654 ),
6655 new_text: "a::{b, c}".into(),
6656 },
6657 // Reinsert the remainder of the file between the semicolon and the final
6658 // newline of the file.
6659 lsp::TextEdit {
6660 range: lsp::Range::new(
6661 lsp::Position::new(0, 9),
6662 lsp::Position::new(0, 9),
6663 ),
6664 new_text: "\n\n".into(),
6665 },
6666 lsp::TextEdit {
6667 range: lsp::Range::new(
6668 lsp::Position::new(0, 9),
6669 lsp::Position::new(0, 9),
6670 ),
6671 new_text: "
6672 fn f() {
6673 b();
6674 c();
6675 }"
6676 .unindent(),
6677 },
6678 // Delete everything after the first newline of the file.
6679 lsp::TextEdit {
6680 range: lsp::Range::new(
6681 lsp::Position::new(1, 0),
6682 lsp::Position::new(7, 0),
6683 ),
6684 new_text: "".into(),
6685 },
6686 ],
6687 None,
6688 cx,
6689 )
6690 })
6691 .await
6692 .unwrap();
6693
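        // The four LSP edits should be coalesced into two buffer edits. Resolve their
        // anchor ranges to points to verify this before applying them.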
6694 buffer.update(cx, |buffer, cx| {
6695 let edits = edits
6696 .into_iter()
6697 .map(|(range, text)| {
6698 (
6699 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6700 text,
6701 )
6702 })
6703 .collect::<Vec<_>>();
6704
6705 assert_eq!(
6706 edits,
6707 [
6708 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6709 (Point::new(1, 0)..Point::new(2, 0), "".into())
6710 ]
6711 );
6712
6713 for (range, new_text) in edits {
6714 buffer.edit([(range, new_text)], cx);
6715 }
6716 assert_eq!(
6717 buffer.text(),
6718 "
6719 use a::{b, c};
6720
6721 fn f() {
6722 b();
6723 c();
6724 }
6725 "
6726 .unindent()
6727 );
6728 });
6729 }
6730
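    /// Groups a buffer's chunks into runs that share the same diagnostic severity,
    /// returning each run's text along with that severity (if any).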
6731 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6732 buffer: &Buffer,
6733 range: Range<T>,
6734 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6735 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6736 for chunk in buffer.snapshot().chunks(range, true) {
6737 if chunks.last().map_or(false, |prev_chunk| {
6738 prev_chunk.1 == chunk.diagnostic_severity
6739 }) {
6740 chunks.last_mut().unwrap().0.push_str(chunk.text);
6741 } else {
6742 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6743 }
6744 }
6745 chunks
6746 }
6747
6748 #[gpui::test]
6749 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6750 let dir = temp_tree(json!({
6751 "root": {
6752 "dir1": {},
6753 "dir2": {
6754 "dir3": {}
6755 }
6756 }
6757 }));
6758
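        // The worktree contains only directories, so path matching should return no results.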
6759 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6760 let cancel_flag = Default::default();
6761 let results = project
6762 .read_with(cx, |project, cx| {
6763 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6764 })
6765 .await;
6766
6767 assert!(results.is_empty());
6768 }
6769
6770 #[gpui::test(iterations = 10)]
6771 async fn test_definition(cx: &mut gpui::TestAppContext) {
6772 let mut language = Language::new(
6773 LanguageConfig {
6774 name: "Rust".into(),
6775 path_suffixes: vec!["rs".to_string()],
6776 ..Default::default()
6777 },
6778 Some(tree_sitter_rust::language()),
6779 );
6780 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6781
6782 let fs = FakeFs::new(cx.background());
6783 fs.insert_tree(
6784 "/dir",
6785 json!({
6786 "a.rs": "const fn a() { A }",
6787 "b.rs": "const y: i32 = crate::a()",
6788 }),
6789 )
6790 .await;
6791
6792 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6793 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6794
6795 let buffer = project
6796 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6797 .await
6798 .unwrap();
6799
6800 let fake_server = fake_servers.next().await.unwrap();
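        // Respond with a definition located in a file that is not yet part of the
        // project; opening it should add a new, non-visible worktree.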
6801 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6802 let params = params.text_document_position_params;
6803 assert_eq!(
6804 params.text_document.uri.to_file_path().unwrap(),
6805 Path::new("/dir/b.rs"),
6806 );
6807 assert_eq!(params.position, lsp::Position::new(0, 22));
6808
6809 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6810 lsp::Location::new(
6811 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6812 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6813 ),
6814 )))
6815 });
6816
6817 let mut definitions = project
6818 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6819 .await
6820 .unwrap();
6821
6822 assert_eq!(definitions.len(), 1);
6823 let definition = definitions.pop().unwrap();
6824 cx.update(|cx| {
6825 let target_buffer = definition.buffer.read(cx);
6826 assert_eq!(
6827 target_buffer
6828 .file()
6829 .unwrap()
6830 .as_local()
6831 .unwrap()
6832 .abs_path(cx),
6833 Path::new("/dir/a.rs"),
6834 );
6835 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6836 assert_eq!(
6837 list_worktrees(&project, cx),
6838 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6839 );
6840
6841 drop(definition);
6842 });
6843 cx.read(|cx| {
6844 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6845 });
6846
6847 fn list_worktrees<'a>(
6848 project: &'a ModelHandle<Project>,
6849 cx: &'a AppContext,
6850 ) -> Vec<(&'a Path, bool)> {
6851 project
6852 .read(cx)
6853 .worktrees(cx)
6854 .map(|worktree| {
6855 let worktree = worktree.read(cx);
6856 (
6857 worktree.as_local().unwrap().abs_path().as_ref(),
6858 worktree.is_visible(),
6859 )
6860 })
6861 .collect::<Vec<_>>()
6862 }
6863 }
6864
6865 #[gpui::test]
6866 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6867 let mut language = Language::new(
6868 LanguageConfig {
6869 name: "TypeScript".into(),
6870 path_suffixes: vec!["ts".to_string()],
6871 ..Default::default()
6872 },
6873 Some(tree_sitter_typescript::language_typescript()),
6874 );
6875 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6876
6877 let fs = FakeFs::new(cx.background());
6878 fs.insert_tree(
6879 "/dir",
6880 json!({
6881 "a.ts": "",
6882 }),
6883 )
6884 .await;
6885
6886 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6887 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6888 let buffer = project
6889 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6890 .await
6891 .unwrap();
6892
6893 let fake_server = fake_language_servers.next().await.unwrap();
6894
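        // Request completions at the end of "b.fqn". Because the completion item below
        // carries no edit range, the word under the cursor ("fqn") should be replaced.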
6895 let text = "let a = b.fqn";
6896 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6897 let completions = project.update(cx, |project, cx| {
6898 project.completions(&buffer, text.len(), cx)
6899 });
6900
6901 fake_server
6902 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6903 Ok(Some(lsp::CompletionResponse::Array(vec![
6904 lsp::CompletionItem {
6905 label: "fullyQualifiedName?".into(),
6906 insert_text: Some("fullyQualifiedName".into()),
6907 ..Default::default()
6908 },
6909 ])))
6910 })
6911 .next()
6912 .await;
6913 let completions = completions.await.unwrap();
6914 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6915 assert_eq!(completions.len(), 1);
6916 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6917 assert_eq!(
6918 completions[0].old_range.to_offset(&snapshot),
6919 text.len() - 3..text.len()
6920 );
6921 }
6922
6923 #[gpui::test(iterations = 10)]
6924 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6925 let mut language = Language::new(
6926 LanguageConfig {
6927 name: "TypeScript".into(),
6928 path_suffixes: vec!["ts".to_string()],
6929 ..Default::default()
6930 },
6931 None,
6932 );
6933 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6934
6935 let fs = FakeFs::new(cx.background());
6936 fs.insert_tree(
6937 "/dir",
6938 json!({
6939 "a.ts": "a",
6940 }),
6941 )
6942 .await;
6943
6944 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6945 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6946 let buffer = project
6947 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6948 .await
6949 .unwrap();
6950
6951 let fake_server = fake_language_servers.next().await.unwrap();
6952
        // The language server returns code actions that contain commands but no edits.
6954 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6955 fake_server
6956 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6957 Ok(Some(vec![
6958 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6959 title: "The code action".into(),
6960 command: Some(lsp::Command {
6961 title: "The command".into(),
6962 command: "_the/command".into(),
6963 arguments: Some(vec![json!("the-argument")]),
6964 }),
6965 ..Default::default()
6966 }),
6967 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6968 title: "two".into(),
6969 ..Default::default()
6970 }),
6971 ]))
6972 })
6973 .next()
6974 .await;
6975
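        // Apply the first code action, which carries a command but no edits.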
6976 let action = actions.await.unwrap()[0].clone();
6977 let apply = project.update(cx, |project, cx| {
6978 project.apply_code_action(buffer.clone(), action, true, cx)
6979 });
6980
        // Resolving the code action does not populate its edits. In the absence
        // of edits, we must execute the given command.
6983 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6984 |action, _| async move { Ok(action) },
6985 );
6986
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6989 fake_server
6990 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6991 let fake = fake_server.clone();
6992 move |params, _| {
6993 assert_eq!(params.command, "_the/command");
6994 let fake = fake.clone();
6995 async move {
6996 fake.server
6997 .request::<lsp::request::ApplyWorkspaceEdit>(
6998 lsp::ApplyWorkspaceEditParams {
6999 label: None,
7000 edit: lsp::WorkspaceEdit {
7001 changes: Some(
7002 [(
7003 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7004 vec![lsp::TextEdit {
7005 range: lsp::Range::new(
7006 lsp::Position::new(0, 0),
7007 lsp::Position::new(0, 0),
7008 ),
7009 new_text: "X".into(),
7010 }],
7011 )]
7012 .into_iter()
7013 .collect(),
7014 ),
7015 ..Default::default()
7016 },
7017 },
7018 )
7019 .await
7020 .unwrap();
7021 Ok(Some(json!(null)))
7022 }
7023 }
7024 })
7025 .next()
7026 .await;
7027
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7030 let transaction = apply.await.unwrap();
7031 assert!(transaction.0.contains_key(&buffer));
7032 buffer.update(cx, |buffer, cx| {
7033 assert_eq!(buffer.text(), "Xa");
7034 buffer.undo(cx);
7035 assert_eq!(buffer.text(), "a");
7036 });
7037 }
7038
7039 #[gpui::test]
7040 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7041 let fs = FakeFs::new(cx.background());
7042 fs.insert_tree(
7043 "/dir",
7044 json!({
7045 "file1": "the old contents",
7046 }),
7047 )
7048 .await;
7049
7050 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7051 let buffer = project
7052 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7053 .await
7054 .unwrap();
7055 buffer
7056 .update(cx, |buffer, cx| {
7057 assert_eq!(buffer.text(), "the old contents");
7058 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7059 buffer.save(cx)
7060 })
7061 .await
7062 .unwrap();
7063
7064 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7065 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7066 }
7067
7068 #[gpui::test]
7069 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7070 let fs = FakeFs::new(cx.background());
7071 fs.insert_tree(
7072 "/dir",
7073 json!({
7074 "file1": "the old contents",
7075 }),
7076 )
7077 .await;
7078
7079 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7080 let buffer = project
7081 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7082 .await
7083 .unwrap();
7084 buffer
7085 .update(cx, |buffer, cx| {
7086 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7087 buffer.save(cx)
7088 })
7089 .await
7090 .unwrap();
7091
7092 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7093 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7094 }
7095
7096 #[gpui::test]
7097 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7098 let fs = FakeFs::new(cx.background());
7099 fs.insert_tree("/dir", json!({})).await;
7100
7101 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7102 let buffer = project.update(cx, |project, cx| {
7103 project.create_buffer("", None, cx).unwrap()
7104 });
7105 buffer.update(cx, |buffer, cx| {
7106 buffer.edit([(0..0, "abc")], cx);
7107 assert!(buffer.is_dirty());
7108 assert!(!buffer.has_conflict());
7109 });
7110 project
7111 .update(cx, |project, cx| {
7112 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7113 })
7114 .await
7115 .unwrap();
7116 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7117 buffer.read_with(cx, |buffer, cx| {
7118 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7119 assert!(!buffer.is_dirty());
7120 assert!(!buffer.has_conflict());
7121 });
7122
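        // Opening the path the buffer was saved to should return the same buffer.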
7123 let opened_buffer = project
7124 .update(cx, |project, cx| {
7125 project.open_local_buffer("/dir/file1", cx)
7126 })
7127 .await
7128 .unwrap();
7129 assert_eq!(opened_buffer, buffer);
7130 }
7131
7132 #[gpui::test(retries = 5)]
7133 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7134 let dir = temp_tree(json!({
7135 "a": {
7136 "file1": "",
7137 "file2": "",
7138 "file3": "",
7139 },
7140 "b": {
7141 "c": {
7142 "file4": "",
7143 "file5": "",
7144 }
7145 }
7146 }));
7147
7148 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7149 let rpc = project.read_with(cx, |p, _| p.client.clone());
7150
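        // Helpers for opening a buffer at a path and for looking up a worktree
        // entry id by path.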
7151 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7152 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7153 async move { buffer.await.unwrap() }
7154 };
7155 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7156 project.read_with(cx, |project, cx| {
7157 let tree = project.worktrees(cx).next().unwrap();
7158 tree.read(cx)
7159 .entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
7161 .id
7162 })
7163 };
7164
7165 let buffer2 = buffer_for_path("a/file2", cx).await;
7166 let buffer3 = buffer_for_path("a/file3", cx).await;
7167 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7168 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7169
7170 let file2_id = id_for_path("a/file2", &cx);
7171 let file3_id = id_for_path("a/file3", &cx);
7172 let file4_id = id_for_path("b/c/file4", &cx);
7173
7174 // Create a remote copy of this worktree.
7175 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7176 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7177 let (remote, load_task) = cx.update(|cx| {
7178 Worktree::remote(
7179 1,
7180 1,
7181 initial_snapshot.to_proto(&Default::default(), true),
7182 rpc.clone(),
7183 cx,
7184 )
7185 });
7187 load_task.await;
7188
7189 cx.read(|cx| {
7190 assert!(!buffer2.read(cx).is_dirty());
7191 assert!(!buffer3.read(cx).is_dirty());
7192 assert!(!buffer4.read(cx).is_dirty());
7193 assert!(!buffer5.read(cx).is_dirty());
7194 });
7195
7196 // Rename and delete files and directories.
7197 tree.flush_fs_events(&cx).await;
7198 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7199 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7200 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7201 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7202 tree.flush_fs_events(&cx).await;
7203
7204 let expected_paths = vec![
7205 "a",
7206 "a/file1",
7207 "a/file2.new",
7208 "b",
7209 "d",
7210 "d/file3",
7211 "d/file4",
7212 ];
7213
7214 cx.read(|app| {
7215 assert_eq!(
7216 tree.read(app)
7217 .paths()
7218 .map(|p| p.to_str().unwrap())
7219 .collect::<Vec<_>>(),
7220 expected_paths
7221 );
7222
7223 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7224 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7225 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7226
7227 assert_eq!(
7228 buffer2.read(app).file().unwrap().path().as_ref(),
7229 Path::new("a/file2.new")
7230 );
7231 assert_eq!(
7232 buffer3.read(app).file().unwrap().path().as_ref(),
7233 Path::new("d/file3")
7234 );
7235 assert_eq!(
7236 buffer4.read(app).file().unwrap().path().as_ref(),
7237 Path::new("d/file4")
7238 );
7239 assert_eq!(
7240 buffer5.read(app).file().unwrap().path().as_ref(),
7241 Path::new("b/c/file5")
7242 );
7243
7244 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7245 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7246 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7247 assert!(buffer5.read(app).file().unwrap().is_deleted());
7248 });
7249
7250 // Update the remote worktree. Check that it becomes consistent with the
7251 // local worktree.
7252 remote.update(cx, |remote, cx| {
7253 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7254 &initial_snapshot,
7255 1,
7256 1,
7257 true,
7258 );
7259 remote
7260 .as_remote_mut()
7261 .unwrap()
7262 .snapshot
7263 .apply_remote_update(update_message)
7264 .unwrap();
7265
7266 assert_eq!(
7267 remote
7268 .paths()
7269 .map(|p| p.to_str().unwrap())
7270 .collect::<Vec<_>>(),
7271 expected_paths
7272 );
7273 });
7274 }
7275
7276 #[gpui::test]
7277 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7278 let fs = FakeFs::new(cx.background());
7279 fs.insert_tree(
7280 "/dir",
7281 json!({
7282 "a.txt": "a-contents",
7283 "b.txt": "b-contents",
7284 }),
7285 )
7286 .await;
7287
7288 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7289
7290 // Spawn multiple tasks to open paths, repeating some paths.
7291 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7292 (
7293 p.open_local_buffer("/dir/a.txt", cx),
7294 p.open_local_buffer("/dir/b.txt", cx),
7295 p.open_local_buffer("/dir/a.txt", cx),
7296 )
7297 });
7298
7299 let buffer_a_1 = buffer_a_1.await.unwrap();
7300 let buffer_a_2 = buffer_a_2.await.unwrap();
7301 let buffer_b = buffer_b.await.unwrap();
7302 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7303 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7304
7305 // There is only one buffer per path.
7306 let buffer_a_id = buffer_a_1.id();
7307 assert_eq!(buffer_a_2.id(), buffer_a_id);
7308
7309 // Open the same path again while it is still open.
7310 drop(buffer_a_1);
7311 let buffer_a_3 = project
7312 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7313 .await
7314 .unwrap();
7315
7316 // There's still only one buffer per path.
7317 assert_eq!(buffer_a_3.id(), buffer_a_id);
7318 }
7319
7320 #[gpui::test]
7321 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7322 let fs = FakeFs::new(cx.background());
7323 fs.insert_tree(
7324 "/dir",
7325 json!({
7326 "file1": "abc",
7327 "file2": "def",
7328 "file3": "ghi",
7329 }),
7330 )
7331 .await;
7332
7333 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7334
7335 let buffer1 = project
7336 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7337 .await
7338 .unwrap();
7339 let events = Rc::new(RefCell::new(Vec::new()));
7340
7341 // initially, the buffer isn't dirty.
7342 buffer1.update(cx, |buffer, cx| {
7343 cx.subscribe(&buffer1, {
7344 let events = events.clone();
7345 move |_, _, event, _| match event {
7346 BufferEvent::Operation(_) => {}
7347 _ => events.borrow_mut().push(event.clone()),
7348 }
7349 })
7350 .detach();
7351
7352 assert!(!buffer.is_dirty());
7353 assert!(events.borrow().is_empty());
7354
7355 buffer.edit([(1..2, "")], cx);
7356 });
7357
7358 // after the first edit, the buffer is dirty, and emits a dirtied event.
7359 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
7361 assert!(buffer.is_dirty());
7362 assert_eq!(
7363 *events.borrow(),
7364 &[language::Event::Edited, language::Event::Dirtied]
7365 );
7366 events.borrow_mut().clear();
7367 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7368 });
7369
7370 // after saving, the buffer is not dirty, and emits a saved event.
7371 buffer1.update(cx, |buffer, cx| {
7372 assert!(!buffer.is_dirty());
7373 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7374 events.borrow_mut().clear();
7375
7376 buffer.edit([(1..1, "B")], cx);
7377 buffer.edit([(2..2, "D")], cx);
7378 });
7379
7380 // after editing again, the buffer is dirty, and emits another dirty event.
7381 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
7383 assert!(buffer.is_dirty());
7384 assert_eq!(
7385 *events.borrow(),
7386 &[
7387 language::Event::Edited,
7388 language::Event::Dirtied,
7389 language::Event::Edited,
7390 ],
7391 );
7392 events.borrow_mut().clear();
7393
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7396 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
7398 assert!(buffer.is_dirty());
7399 });
7400
7401 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7402
7403 // When a file is deleted, the buffer is considered dirty.
7404 let events = Rc::new(RefCell::new(Vec::new()));
7405 let buffer2 = project
7406 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7407 .await
7408 .unwrap();
7409 buffer2.update(cx, |_, cx| {
7410 cx.subscribe(&buffer2, {
7411 let events = events.clone();
7412 move |_, _, event, _| events.borrow_mut().push(event.clone())
7413 })
7414 .detach();
7415 });
7416
7417 fs.remove_file("/dir/file2".as_ref(), Default::default())
7418 .await
7419 .unwrap();
7420 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7421 assert_eq!(
7422 *events.borrow(),
7423 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7424 );
7425
7426 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7427 let events = Rc::new(RefCell::new(Vec::new()));
7428 let buffer3 = project
7429 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7430 .await
7431 .unwrap();
7432 buffer3.update(cx, |_, cx| {
7433 cx.subscribe(&buffer3, {
7434 let events = events.clone();
7435 move |_, _, event, _| events.borrow_mut().push(event.clone())
7436 })
7437 .detach();
7438 });
7439
7440 buffer3.update(cx, |buffer, cx| {
7441 buffer.edit([(0..0, "x")], cx);
7442 });
7443 events.borrow_mut().clear();
7444 fs.remove_file("/dir/file3".as_ref(), Default::default())
7445 .await
7446 .unwrap();
7447 buffer3
7448 .condition(&cx, |_, _| !events.borrow().is_empty())
7449 .await;
7450 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7451 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7452 }
7453
7454 #[gpui::test]
7455 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7456 let initial_contents = "aaa\nbbbbb\nc\n";
7457 let fs = FakeFs::new(cx.background());
7458 fs.insert_tree(
7459 "/dir",
7460 json!({
7461 "the-file": initial_contents,
7462 }),
7463 )
7464 .await;
7465 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7466 let buffer = project
7467 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7468 .await
7469 .unwrap();
7470
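        // Create anchors in the first three lines to verify how they are repositioned
        // when the file is reloaded from disk.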
7471 let anchors = (0..3)
7472 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7473 .collect::<Vec<_>>();
7474
7475 // Change the file on disk, adding two new lines of text, and removing
7476 // one line.
7477 buffer.read_with(cx, |buffer, _| {
7478 assert!(!buffer.is_dirty());
7479 assert!(!buffer.has_conflict());
7480 });
7481 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7482 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7483 .await
7484 .unwrap();
7485
7486 // Because the buffer was not modified, it is reloaded from disk. Its
7487 // contents are edited according to the diff between the old and new
7488 // file contents.
7489 buffer
7490 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7491 .await;
7492
7493 buffer.update(cx, |buffer, _| {
7494 assert_eq!(buffer.text(), new_contents);
7495 assert!(!buffer.is_dirty());
7496 assert!(!buffer.has_conflict());
7497
7498 let anchor_positions = anchors
7499 .iter()
7500 .map(|anchor| anchor.to_point(&*buffer))
7501 .collect::<Vec<_>>();
7502 assert_eq!(
7503 anchor_positions,
7504 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7505 );
7506 });
7507
7508 // Modify the buffer
7509 buffer.update(cx, |buffer, cx| {
7510 buffer.edit([(0..0, " ")], cx);
7511 assert!(buffer.is_dirty());
7512 assert!(!buffer.has_conflict());
7513 });
7514
7515 // Change the file on disk again, adding blank lines to the beginning.
7516 fs.save(
7517 "/dir/the-file".as_ref(),
7518 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7519 )
7520 .await
7521 .unwrap();
7522
7523 // Because the buffer is modified, it doesn't reload from disk, but is
7524 // marked as having a conflict.
7525 buffer
7526 .condition(&cx, |buffer, _| buffer.has_conflict())
7527 .await;
7528 }
7529
7530 #[gpui::test]
7531 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7532 cx.foreground().forbid_parking();
7533
7534 let fs = FakeFs::new(cx.background());
7535 fs.insert_tree(
7536 "/the-dir",
7537 json!({
7538 "a.rs": "
7539 fn foo(mut v: Vec<usize>) {
7540 for x in &v {
7541 v.push(1);
7542 }
7543 }
7544 "
7545 .unindent(),
7546 }),
7547 )
7548 .await;
7549
7550 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7551 let buffer = project
7552 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7553 .await
7554 .unwrap();
7555
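        // Publish diagnostics whose related_information links hints to their primary
        // diagnostics, so that they end up in the same diagnostic groups.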
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

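        // Ingest the message. Entries that point at one another through
        // `related_information` are expected to end up in the same group,
        // with the warning/error acting as the group's primary diagnostic.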
        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

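        // Querying the whole buffer yields every entry in position order;
        // each hint carries the same `group_id` as its primary diagnostic.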
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

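        // Each group can also be retrieved as a unit, primary and hints alike.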
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

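        // Register a Rust language backed by a fake language server that
        // advertises rename support with `prepare_provider` enabled.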
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

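        // `prepare_rename` asks the server which range at the cursor can be
        // renamed; the fake server answers with the range of `ONE` (columns
        // 6..9 on the first line).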
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

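        // Renaming `ONE` to `THREE` returns a `WorkspaceEdit` that touches
        // both one.rs and two.rs; `perform_rename` applies it and hands back
        // the affected buffers as a project transaction.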
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
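        // No buffers are open yet, so these matches come from the files as
        // they were written to the (fake) filesystem.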
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

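        // Open four.rs and edit it in memory so that it references two::TWO,
        // without saving the change to disk.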
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

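        // The same query now also reports the unsaved, in-memory contents of
        // four.rs.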
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

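        // Helper that flattens `Project::search` results into a map from file
        // path to matching offset ranges, keeping the assertions above terse.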
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}