1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeAdded,
143 WorktreeRemoved(WorktreeId),
144 DiskBasedDiagnosticsStarted,
145 DiskBasedDiagnosticsUpdated,
146 DiskBasedDiagnosticsFinished,
147 DiagnosticsUpdated(ProjectPath),
148 RemoteIdChanged(Option<u64>),
149 CollaboratorLeft(PeerId),
150 ContactRequestedJoin(Arc<User>),
151 ContactCancelledJoinRequest(Arc<User>),
152}
153
154#[derive(Serialize)]
155pub struct LanguageServerStatus {
156 pub name: String,
157 pub pending_work: BTreeMap<String, LanguageServerProgress>,
158 pub pending_diagnostic_updates: isize,
159}
160
161#[derive(Clone, Debug, Serialize)]
162pub struct LanguageServerProgress {
163 pub message: Option<String>,
164 pub percentage: Option<usize>,
165 #[serde(skip_serializing)]
166 pub last_update_at: Instant,
167}
168
169#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
170pub struct ProjectPath {
171 pub worktree_id: WorktreeId,
172 pub path: Arc<Path>,
173}
174
175#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
176pub struct DiagnosticSummary {
177 pub error_count: usize,
178 pub warning_count: usize,
179}
180
181#[derive(Debug)]
182pub struct Location {
183 pub buffer: ModelHandle<Buffer>,
184 pub range: Range<language::Anchor>,
185}
186
187#[derive(Debug)]
188pub struct DocumentHighlight {
189 pub range: Range<language::Anchor>,
190 pub kind: DocumentHighlightKind,
191}
192
193#[derive(Clone, Debug)]
194pub struct Symbol {
195 pub source_worktree_id: WorktreeId,
196 pub worktree_id: WorktreeId,
197 pub language_server_name: LanguageServerName,
198 pub path: PathBuf,
199 pub label: CodeLabel,
200 pub name: String,
201 pub kind: lsp::SymbolKind,
202 pub range: Range<PointUtf16>,
203 pub signature: [u8; 32],
204}
205
206#[derive(Default)]
207pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
208
209impl DiagnosticSummary {
210 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
211 let mut this = Self {
212 error_count: 0,
213 warning_count: 0,
214 };
215
216 for entry in diagnostics {
217 if entry.diagnostic.is_primary {
218 match entry.diagnostic.severity {
219 DiagnosticSeverity::ERROR => this.error_count += 1,
220 DiagnosticSeverity::WARNING => this.warning_count += 1,
221 _ => {}
222 }
223 }
224 }
225
226 this
227 }
228
229 pub fn is_empty(&self) -> bool {
230 self.error_count == 0 && self.warning_count == 0
231 }
232
233 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
234 proto::DiagnosticSummary {
235 path: path.to_string_lossy().to_string(),
236 error_count: self.error_count as u32,
237 warning_count: self.warning_count as u32,
238 }
239 }
240}
241
242#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
243pub struct ProjectEntryId(usize);
244
245impl ProjectEntryId {
246 pub const MAX: Self = Self(usize::MAX);
247
248 pub fn new(counter: &AtomicUsize) -> Self {
249 Self(counter.fetch_add(1, SeqCst))
250 }
251
252 pub fn from_proto(id: u64) -> Self {
253 Self(id as usize)
254 }
255
256 pub fn to_proto(&self) -> u64 {
257 self.0 as u64
258 }
259
260 pub fn to_usize(&self) -> usize {
261 self.0
262 }
263}
264
265impl Project {
266 pub fn init(client: &Arc<Client>) {
267 client.add_model_message_handler(Self::handle_request_join_project);
268 client.add_model_message_handler(Self::handle_add_collaborator);
269 client.add_model_message_handler(Self::handle_buffer_reloaded);
270 client.add_model_message_handler(Self::handle_buffer_saved);
271 client.add_model_message_handler(Self::handle_start_language_server);
272 client.add_model_message_handler(Self::handle_update_language_server);
273 client.add_model_message_handler(Self::handle_remove_collaborator);
274 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
275 client.add_model_message_handler(Self::handle_register_worktree);
276 client.add_model_message_handler(Self::handle_unregister_worktree);
277 client.add_model_message_handler(Self::handle_unregister_project);
278 client.add_model_message_handler(Self::handle_project_unshared);
279 client.add_model_message_handler(Self::handle_update_buffer_file);
280 client.add_model_message_handler(Self::handle_update_buffer);
281 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
282 client.add_model_message_handler(Self::handle_update_worktree);
283 client.add_model_request_handler(Self::handle_create_project_entry);
284 client.add_model_request_handler(Self::handle_rename_project_entry);
285 client.add_model_request_handler(Self::handle_copy_project_entry);
286 client.add_model_request_handler(Self::handle_delete_project_entry);
287 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
288 client.add_model_request_handler(Self::handle_apply_code_action);
289 client.add_model_request_handler(Self::handle_reload_buffers);
290 client.add_model_request_handler(Self::handle_format_buffers);
291 client.add_model_request_handler(Self::handle_get_code_actions);
292 client.add_model_request_handler(Self::handle_get_completions);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
295 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
296 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
297 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
298 client.add_model_request_handler(Self::handle_search_project);
299 client.add_model_request_handler(Self::handle_get_project_symbols);
300 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
301 client.add_model_request_handler(Self::handle_open_buffer_by_id);
302 client.add_model_request_handler(Self::handle_open_buffer_by_path);
303 client.add_model_request_handler(Self::handle_save_buffer);
304 }
305
306 pub fn local(
307 client: Arc<Client>,
308 user_store: ModelHandle<UserStore>,
309 languages: Arc<LanguageRegistry>,
310 fs: Arc<dyn Fs>,
311 cx: &mut MutableAppContext,
312 ) -> ModelHandle<Self> {
313 cx.add_model(|cx: &mut ModelContext<Self>| {
314 let (remote_id_tx, remote_id_rx) = watch::channel();
315 let _maintain_remote_id_task = cx.spawn_weak({
316 let rpc = client.clone();
317 move |this, mut cx| {
318 async move {
319 let mut status = rpc.status();
320 while let Some(status) = status.next().await {
321 if let Some(this) = this.upgrade(&cx) {
322 if status.is_connected() {
323 this.update(&mut cx, |this, cx| this.register(cx)).await?;
324 } else {
325 this.update(&mut cx, |this, cx| this.unregister(cx));
326 }
327 }
328 }
329 Ok(())
330 }
331 .log_err()
332 }
333 });
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 Self {
337 worktrees: Default::default(),
338 collaborators: Default::default(),
339 opened_buffers: Default::default(),
340 shared_buffers: Default::default(),
341 loading_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 buffer_snapshots: Default::default(),
344 client_state: ProjectClientState::Local {
345 is_shared: false,
346 remote_id_tx,
347 remote_id_rx,
348 _maintain_remote_id_task,
349 },
350 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
351 subscriptions: Vec::new(),
352 active_entry: None,
353 languages,
354 client,
355 user_store,
356 fs,
357 next_entry_id: Default::default(),
358 language_servers: Default::default(),
359 started_language_servers: Default::default(),
360 language_server_statuses: Default::default(),
361 last_workspace_edits_by_language_server: Default::default(),
362 language_server_settings: Default::default(),
363 next_language_server_id: 0,
364 nonce: StdRng::from_entropy().gen(),
365 }
366 })
367 }
368
369 pub async fn remote(
370 remote_id: u64,
371 client: Arc<Client>,
372 user_store: ModelHandle<UserStore>,
373 languages: Arc<LanguageRegistry>,
374 fs: Arc<dyn Fs>,
375 cx: &mut AsyncAppContext,
376 ) -> Result<ModelHandle<Self>, JoinProjectError> {
377 client.authenticate_and_connect(true, &cx).await?;
378
379 let response = client
380 .request(proto::JoinProject {
381 project_id: remote_id,
382 })
383 .await?;
384
385 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
386 proto::join_project_response::Variant::Accept(response) => response,
387 proto::join_project_response::Variant::Decline(decline) => {
388 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
389 Some(proto::join_project_response::decline::Reason::Declined) => {
390 Err(JoinProjectError::HostDeclined)?
391 }
392 Some(proto::join_project_response::decline::Reason::Closed) => {
393 Err(JoinProjectError::HostClosedProject)?
394 }
395 Some(proto::join_project_response::decline::Reason::WentOffline) => {
396 Err(JoinProjectError::HostWentOffline)?
397 }
398 None => Err(anyhow!("missing decline reason"))?,
399 }
400 }
401 };
402
403 let replica_id = response.replica_id as ReplicaId;
404
405 let mut worktrees = Vec::new();
406 for worktree in response.worktrees {
407 let (worktree, load_task) = cx
408 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
409 worktrees.push(worktree);
410 load_task.detach();
411 }
412
413 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
414 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
415 let mut this = Self {
416 worktrees: Vec::new(),
417 loading_buffers: Default::default(),
418 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
419 shared_buffers: Default::default(),
420 loading_local_worktrees: Default::default(),
421 active_entry: None,
422 collaborators: Default::default(),
423 languages,
424 user_store: user_store.clone(),
425 fs,
426 next_entry_id: Default::default(),
427 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
428 client: client.clone(),
429 client_state: ProjectClientState::Remote {
430 sharing_has_stopped: false,
431 remote_id,
432 replica_id,
433 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
434 async move {
435 let mut status = client.status();
436 let is_connected =
437 status.next().await.map_or(false, |s| s.is_connected());
438 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
439 if !is_connected || status.next().await.is_some() {
440 if let Some(this) = this.upgrade(&cx) {
441 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
442 }
443 }
444 Ok(())
445 }
446 .log_err()
447 }),
448 },
449 language_servers: Default::default(),
450 started_language_servers: Default::default(),
451 language_server_settings: Default::default(),
452 language_server_statuses: response
453 .language_servers
454 .into_iter()
455 .map(|server| {
456 (
457 server.id as usize,
458 LanguageServerStatus {
459 name: server.name,
460 pending_work: Default::default(),
461 pending_diagnostic_updates: 0,
462 },
463 )
464 })
465 .collect(),
466 last_workspace_edits_by_language_server: Default::default(),
467 next_language_server_id: 0,
468 opened_buffers: Default::default(),
469 buffer_snapshots: Default::default(),
470 nonce: StdRng::from_entropy().gen(),
471 };
472 for worktree in worktrees {
473 this.add_worktree(&worktree, cx);
474 }
475 this
476 });
477
478 let user_ids = response
479 .collaborators
480 .iter()
481 .map(|peer| peer.user_id)
482 .collect();
483 user_store
484 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
485 .await?;
486 let mut collaborators = HashMap::default();
487 for message in response.collaborators {
488 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
489 collaborators.insert(collaborator.peer_id, collaborator);
490 }
491
492 this.update(cx, |this, _| {
493 this.collaborators = collaborators;
494 });
495
496 Ok(this)
497 }
498
499 #[cfg(any(test, feature = "test-support"))]
500 pub async fn test(
501 fs: Arc<dyn Fs>,
502 root_paths: impl IntoIterator<Item = &Path>,
503 cx: &mut gpui::TestAppContext,
504 ) -> ModelHandle<Project> {
505 let languages = Arc::new(LanguageRegistry::test());
506 let http_client = client::test::FakeHttpClient::with_404_response();
507 let client = client::Client::new(http_client.clone());
508 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
509 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
510 for path in root_paths {
511 let (tree, _) = project
512 .update(cx, |project, cx| {
513 project.find_or_create_local_worktree(path, true, cx)
514 })
515 .await
516 .unwrap();
517 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
518 .await;
519 }
520 project
521 }
522
523 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
524 self.opened_buffers
525 .get(&remote_id)
526 .and_then(|buffer| buffer.upgrade(cx))
527 }
528
529 pub fn languages(&self) -> &Arc<LanguageRegistry> {
530 &self.languages
531 }
532
533 pub fn client(&self) -> Arc<Client> {
534 self.client.clone()
535 }
536
537 pub fn user_store(&self) -> ModelHandle<UserStore> {
538 self.user_store.clone()
539 }
540
541 #[cfg(any(test, feature = "test-support"))]
542 pub fn check_invariants(&self, cx: &AppContext) {
543 if self.is_local() {
544 let mut worktree_root_paths = HashMap::default();
545 for worktree in self.worktrees(cx) {
546 let worktree = worktree.read(cx);
547 let abs_path = worktree.as_local().unwrap().abs_path().clone();
548 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
549 assert_eq!(
550 prev_worktree_id,
551 None,
552 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
553 abs_path,
554 worktree.id(),
555 prev_worktree_id
556 )
557 }
558 } else {
559 let replica_id = self.replica_id();
560 for buffer in self.opened_buffers.values() {
561 if let Some(buffer) = buffer.upgrade(cx) {
562 let buffer = buffer.read(cx);
563 assert_eq!(
564 buffer.deferred_ops_len(),
565 0,
566 "replica {}, buffer {} has deferred operations",
567 replica_id,
568 buffer.remote_id()
569 );
570 }
571 }
572 }
573 }
574
575 #[cfg(any(test, feature = "test-support"))]
576 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
577 let path = path.into();
578 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
579 self.opened_buffers.iter().any(|(_, buffer)| {
580 if let Some(buffer) = buffer.upgrade(cx) {
581 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
582 if file.worktree == worktree && file.path() == &path.path {
583 return true;
584 }
585 }
586 }
587 false
588 })
589 } else {
590 false
591 }
592 }
593
594 pub fn fs(&self) -> &Arc<dyn Fs> {
595 &self.fs
596 }
597
598 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
599 self.unshared(cx);
600 for worktree in &self.worktrees {
601 if let Some(worktree) = worktree.upgrade(cx) {
602 worktree.update(cx, |worktree, _| {
603 worktree.as_local_mut().unwrap().unregister();
604 });
605 }
606 }
607
608 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
609 *remote_id_tx.borrow_mut() = None;
610 }
611
612 self.subscriptions.clear();
613 }
614
615 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
616 self.unregister(cx);
617
618 let response = self.client.request(proto::RegisterProject {});
619 cx.spawn(|this, mut cx| async move {
620 let remote_id = response.await?.project_id;
621
622 let mut registrations = Vec::new();
623 this.update(&mut cx, |this, cx| {
624 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
625 *remote_id_tx.borrow_mut() = Some(remote_id);
626 }
627
628 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
629
630 this.subscriptions
631 .push(this.client.add_model_for_remote_entity(remote_id, cx));
632
633 for worktree in &this.worktrees {
634 if let Some(worktree) = worktree.upgrade(cx) {
635 registrations.push(worktree.update(cx, |worktree, cx| {
636 let worktree = worktree.as_local_mut().unwrap();
637 worktree.register(remote_id, cx)
638 }));
639 }
640 }
641 });
642
643 futures::future::try_join_all(registrations).await?;
644 Ok(())
645 })
646 }
647
648 pub fn remote_id(&self) -> Option<u64> {
649 match &self.client_state {
650 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
651 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
652 }
653 }
654
655 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
656 let mut id = None;
657 let mut watch = None;
658 match &self.client_state {
659 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
660 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
661 }
662
663 async move {
664 if let Some(id) = id {
665 return id;
666 }
667 let mut watch = watch.unwrap();
668 loop {
669 let id = *watch.borrow();
670 if let Some(id) = id {
671 return id;
672 }
673 watch.next().await;
674 }
675 }
676 }
677
678 pub fn replica_id(&self) -> ReplicaId {
679 match &self.client_state {
680 ProjectClientState::Local { .. } => 0,
681 ProjectClientState::Remote { replica_id, .. } => *replica_id,
682 }
683 }
684
685 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
686 &self.collaborators
687 }
688
689 pub fn worktrees<'a>(
690 &'a self,
691 cx: &'a AppContext,
692 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
693 self.worktrees
694 .iter()
695 .filter_map(move |worktree| worktree.upgrade(cx))
696 }
697
698 pub fn visible_worktrees<'a>(
699 &'a self,
700 cx: &'a AppContext,
701 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
702 self.worktrees.iter().filter_map(|worktree| {
703 worktree.upgrade(cx).and_then(|worktree| {
704 if worktree.read(cx).is_visible() {
705 Some(worktree)
706 } else {
707 None
708 }
709 })
710 })
711 }
712
713 pub fn worktree_for_id(
714 &self,
715 id: WorktreeId,
716 cx: &AppContext,
717 ) -> Option<ModelHandle<Worktree>> {
718 self.worktrees(cx)
719 .find(|worktree| worktree.read(cx).id() == id)
720 }
721
722 pub fn worktree_for_entry(
723 &self,
724 entry_id: ProjectEntryId,
725 cx: &AppContext,
726 ) -> Option<ModelHandle<Worktree>> {
727 self.worktrees(cx)
728 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
729 }
730
731 pub fn worktree_id_for_entry(
732 &self,
733 entry_id: ProjectEntryId,
734 cx: &AppContext,
735 ) -> Option<WorktreeId> {
736 self.worktree_for_entry(entry_id, cx)
737 .map(|worktree| worktree.read(cx).id())
738 }
739
740 pub fn create_entry(
741 &mut self,
742 project_path: impl Into<ProjectPath>,
743 is_directory: bool,
744 cx: &mut ModelContext<Self>,
745 ) -> Option<Task<Result<Entry>>> {
746 let project_path = project_path.into();
747 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
748 if self.is_local() {
749 Some(worktree.update(cx, |worktree, cx| {
750 worktree
751 .as_local_mut()
752 .unwrap()
753 .create_entry(project_path.path, is_directory, cx)
754 }))
755 } else {
756 let client = self.client.clone();
757 let project_id = self.remote_id().unwrap();
758 Some(cx.spawn_weak(|_, mut cx| async move {
759 let response = client
760 .request(proto::CreateProjectEntry {
761 worktree_id: project_path.worktree_id.to_proto(),
762 project_id,
763 path: project_path.path.as_os_str().as_bytes().to_vec(),
764 is_directory,
765 })
766 .await?;
767 let entry = response
768 .entry
769 .ok_or_else(|| anyhow!("missing entry in response"))?;
770 worktree
771 .update(&mut cx, |worktree, cx| {
772 worktree.as_remote().unwrap().insert_entry(
773 entry,
774 response.worktree_scan_id as usize,
775 cx,
776 )
777 })
778 .await
779 }))
780 }
781 }
782
783 pub fn copy_entry(
784 &mut self,
785 entry_id: ProjectEntryId,
786 new_path: impl Into<Arc<Path>>,
787 cx: &mut ModelContext<Self>,
788 ) -> Option<Task<Result<Entry>>> {
789 let worktree = self.worktree_for_entry(entry_id, cx)?;
790 let new_path = new_path.into();
791 if self.is_local() {
792 worktree.update(cx, |worktree, cx| {
793 worktree
794 .as_local_mut()
795 .unwrap()
796 .copy_entry(entry_id, new_path, cx)
797 })
798 } else {
799 let client = self.client.clone();
800 let project_id = self.remote_id().unwrap();
801
802 Some(cx.spawn_weak(|_, mut cx| async move {
803 let response = client
804 .request(proto::CopyProjectEntry {
805 project_id,
806 entry_id: entry_id.to_proto(),
807 new_path: new_path.as_os_str().as_bytes().to_vec(),
808 })
809 .await?;
810 let entry = response
811 .entry
812 .ok_or_else(|| anyhow!("missing entry in response"))?;
813 worktree
814 .update(&mut cx, |worktree, cx| {
815 worktree.as_remote().unwrap().insert_entry(
816 entry,
817 response.worktree_scan_id as usize,
818 cx,
819 )
820 })
821 .await
822 }))
823 }
824 }
825
826 pub fn rename_entry(
827 &mut self,
828 entry_id: ProjectEntryId,
829 new_path: impl Into<Arc<Path>>,
830 cx: &mut ModelContext<Self>,
831 ) -> Option<Task<Result<Entry>>> {
832 let worktree = self.worktree_for_entry(entry_id, cx)?;
833 let new_path = new_path.into();
834 if self.is_local() {
835 worktree.update(cx, |worktree, cx| {
836 worktree
837 .as_local_mut()
838 .unwrap()
839 .rename_entry(entry_id, new_path, cx)
840 })
841 } else {
842 let client = self.client.clone();
843 let project_id = self.remote_id().unwrap();
844
845 Some(cx.spawn_weak(|_, mut cx| async move {
846 let response = client
847 .request(proto::RenameProjectEntry {
848 project_id,
849 entry_id: entry_id.to_proto(),
850 new_path: new_path.as_os_str().as_bytes().to_vec(),
851 })
852 .await?;
853 let entry = response
854 .entry
855 .ok_or_else(|| anyhow!("missing entry in response"))?;
856 worktree
857 .update(&mut cx, |worktree, cx| {
858 worktree.as_remote().unwrap().insert_entry(
859 entry,
860 response.worktree_scan_id as usize,
861 cx,
862 )
863 })
864 .await
865 }))
866 }
867 }
868
869 pub fn delete_entry(
870 &mut self,
871 entry_id: ProjectEntryId,
872 cx: &mut ModelContext<Self>,
873 ) -> Option<Task<Result<()>>> {
874 let worktree = self.worktree_for_entry(entry_id, cx)?;
875 if self.is_local() {
876 worktree.update(cx, |worktree, cx| {
877 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
878 })
879 } else {
880 let client = self.client.clone();
881 let project_id = self.remote_id().unwrap();
882 Some(cx.spawn_weak(|_, mut cx| async move {
883 let response = client
884 .request(proto::DeleteProjectEntry {
885 project_id,
886 entry_id: entry_id.to_proto(),
887 })
888 .await?;
889 worktree
890 .update(&mut cx, move |worktree, cx| {
891 worktree.as_remote().unwrap().delete_entry(
892 entry_id,
893 response.worktree_scan_id as usize,
894 cx,
895 )
896 })
897 .await
898 }))
899 }
900 }
901
902 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
903 let project_id;
904 if let ProjectClientState::Local {
905 remote_id_rx,
906 is_shared,
907 ..
908 } = &mut self.client_state
909 {
910 if *is_shared {
911 return Task::ready(Ok(()));
912 }
913 *is_shared = true;
914 if let Some(id) = *remote_id_rx.borrow() {
915 project_id = id;
916 } else {
917 return Task::ready(Err(anyhow!("project hasn't been registered")));
918 }
919 } else {
920 return Task::ready(Err(anyhow!("can't share a remote project")));
921 };
922
923 for open_buffer in self.opened_buffers.values_mut() {
924 match open_buffer {
925 OpenBuffer::Strong(_) => {}
926 OpenBuffer::Weak(buffer) => {
927 if let Some(buffer) = buffer.upgrade(cx) {
928 *open_buffer = OpenBuffer::Strong(buffer);
929 }
930 }
931 OpenBuffer::Loading(_) => unreachable!(),
932 }
933 }
934
935 for worktree_handle in self.worktrees.iter_mut() {
936 match worktree_handle {
937 WorktreeHandle::Strong(_) => {}
938 WorktreeHandle::Weak(worktree) => {
939 if let Some(worktree) = worktree.upgrade(cx) {
940 *worktree_handle = WorktreeHandle::Strong(worktree);
941 }
942 }
943 }
944 }
945
946 let mut tasks = Vec::new();
947 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
948 worktree.update(cx, |worktree, cx| {
949 let worktree = worktree.as_local_mut().unwrap();
950 tasks.push(worktree.share(project_id, cx));
951 });
952 }
953
954 cx.spawn(|this, mut cx| async move {
955 for task in tasks {
956 task.await?;
957 }
958 this.update(&mut cx, |_, cx| cx.notify());
959 Ok(())
960 })
961 }
962
963 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
964 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
965 if !*is_shared {
966 return;
967 }
968
969 *is_shared = false;
970 self.collaborators.clear();
971 self.shared_buffers.clear();
972 for worktree_handle in self.worktrees.iter_mut() {
973 if let WorktreeHandle::Strong(worktree) = worktree_handle {
974 let is_visible = worktree.update(cx, |worktree, _| {
975 worktree.as_local_mut().unwrap().unshare();
976 worktree.is_visible()
977 });
978 if !is_visible {
979 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
980 }
981 }
982 }
983
984 for open_buffer in self.opened_buffers.values_mut() {
985 match open_buffer {
986 OpenBuffer::Strong(buffer) => {
987 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
988 }
989 _ => {}
990 }
991 }
992
993 cx.notify();
994 } else {
995 log::error!("attempted to unshare a remote project");
996 }
997 }
998
999 pub fn respond_to_join_request(
1000 &mut self,
1001 requester_id: u64,
1002 allow: bool,
1003 cx: &mut ModelContext<Self>,
1004 ) {
1005 if let Some(project_id) = self.remote_id() {
1006 let share = self.share(cx);
1007 let client = self.client.clone();
1008 cx.foreground()
1009 .spawn(async move {
1010 share.await?;
1011 client.send(proto::RespondToJoinProjectRequest {
1012 requester_id,
1013 project_id,
1014 allow,
1015 })
1016 })
1017 .detach_and_log_err(cx);
1018 }
1019 }
1020
1021 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1022 if let ProjectClientState::Remote {
1023 sharing_has_stopped,
1024 ..
1025 } = &mut self.client_state
1026 {
1027 *sharing_has_stopped = true;
1028 self.collaborators.clear();
1029 cx.notify();
1030 }
1031 }
1032
1033 pub fn is_read_only(&self) -> bool {
1034 match &self.client_state {
1035 ProjectClientState::Local { .. } => false,
1036 ProjectClientState::Remote {
1037 sharing_has_stopped,
1038 ..
1039 } => *sharing_has_stopped,
1040 }
1041 }
1042
1043 pub fn is_local(&self) -> bool {
1044 match &self.client_state {
1045 ProjectClientState::Local { .. } => true,
1046 ProjectClientState::Remote { .. } => false,
1047 }
1048 }
1049
1050 pub fn is_remote(&self) -> bool {
1051 !self.is_local()
1052 }
1053
1054 pub fn create_buffer(
1055 &mut self,
1056 text: &str,
1057 language: Option<Arc<Language>>,
1058 cx: &mut ModelContext<Self>,
1059 ) -> Result<ModelHandle<Buffer>> {
1060 if self.is_remote() {
1061 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1062 }
1063
1064 let buffer = cx.add_model(|cx| {
1065 Buffer::new(self.replica_id(), text, cx)
1066 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1067 });
1068 self.register_buffer(&buffer, cx)?;
1069 Ok(buffer)
1070 }
1071
1072 pub fn open_path(
1073 &mut self,
1074 path: impl Into<ProjectPath>,
1075 cx: &mut ModelContext<Self>,
1076 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1077 let task = self.open_buffer(path, cx);
1078 cx.spawn_weak(|_, cx| async move {
1079 let buffer = task.await?;
1080 let project_entry_id = buffer
1081 .read_with(&cx, |buffer, cx| {
1082 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1083 })
1084 .ok_or_else(|| anyhow!("no project entry"))?;
1085 Ok((project_entry_id, buffer.into()))
1086 })
1087 }
1088
1089 pub fn open_local_buffer(
1090 &mut self,
1091 abs_path: impl AsRef<Path>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Task<Result<ModelHandle<Buffer>>> {
1094 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1095 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1096 } else {
1097 Task::ready(Err(anyhow!("no such path")))
1098 }
1099 }
1100
1101 pub fn open_buffer(
1102 &mut self,
1103 path: impl Into<ProjectPath>,
1104 cx: &mut ModelContext<Self>,
1105 ) -> Task<Result<ModelHandle<Buffer>>> {
1106 let project_path = path.into();
1107 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1108 worktree
1109 } else {
1110 return Task::ready(Err(anyhow!("no such worktree")));
1111 };
1112
1113 // If there is already a buffer for the given path, then return it.
1114 let existing_buffer = self.get_open_buffer(&project_path, cx);
1115 if let Some(existing_buffer) = existing_buffer {
1116 return Task::ready(Ok(existing_buffer));
1117 }
1118
1119 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1120 // If the given path is already being loaded, then wait for that existing
1121 // task to complete and return the same buffer.
1122 hash_map::Entry::Occupied(e) => e.get().clone(),
1123
1124 // Otherwise, record the fact that this path is now being loaded.
1125 hash_map::Entry::Vacant(entry) => {
1126 let (mut tx, rx) = postage::watch::channel();
1127 entry.insert(rx.clone());
1128
1129 let load_buffer = if worktree.read(cx).is_local() {
1130 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1131 } else {
1132 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1133 };
1134
1135 cx.spawn(move |this, mut cx| async move {
1136 let load_result = load_buffer.await;
1137 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1138 // Record the fact that the buffer is no longer loading.
1139 this.loading_buffers.remove(&project_path);
1140 let buffer = load_result.map_err(Arc::new)?;
1141 Ok(buffer)
1142 }));
1143 })
1144 .detach();
1145 rx
1146 }
1147 };
1148
1149 cx.foreground().spawn(async move {
1150 loop {
1151 if let Some(result) = loading_watch.borrow().as_ref() {
1152 match result {
1153 Ok(buffer) => return Ok(buffer.clone()),
1154 Err(error) => return Err(anyhow!("{}", error)),
1155 }
1156 }
1157 loading_watch.next().await;
1158 }
1159 })
1160 }
1161
1162 fn open_local_buffer_internal(
1163 &mut self,
1164 path: &Arc<Path>,
1165 worktree: &ModelHandle<Worktree>,
1166 cx: &mut ModelContext<Self>,
1167 ) -> Task<Result<ModelHandle<Buffer>>> {
1168 let load_buffer = worktree.update(cx, |worktree, cx| {
1169 let worktree = worktree.as_local_mut().unwrap();
1170 worktree.load_buffer(path, cx)
1171 });
1172 cx.spawn(|this, mut cx| async move {
1173 let buffer = load_buffer.await?;
1174 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1175 Ok(buffer)
1176 })
1177 }
1178
1179 fn open_remote_buffer_internal(
1180 &mut self,
1181 path: &Arc<Path>,
1182 worktree: &ModelHandle<Worktree>,
1183 cx: &mut ModelContext<Self>,
1184 ) -> Task<Result<ModelHandle<Buffer>>> {
1185 let rpc = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 let remote_worktree_id = worktree.read(cx).id();
1188 let path = path.clone();
1189 let path_string = path.to_string_lossy().to_string();
1190 cx.spawn(|this, mut cx| async move {
1191 let response = rpc
1192 .request(proto::OpenBufferByPath {
1193 project_id,
1194 worktree_id: remote_worktree_id.to_proto(),
1195 path: path_string,
1196 })
1197 .await?;
1198 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1199 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1200 .await
1201 })
1202 }
1203
1204 fn open_local_buffer_via_lsp(
1205 &mut self,
1206 abs_path: lsp::Url,
1207 lsp_adapter: Arc<dyn LspAdapter>,
1208 lsp_server: Arc<LanguageServer>,
1209 cx: &mut ModelContext<Self>,
1210 ) -> Task<Result<ModelHandle<Buffer>>> {
1211 cx.spawn(|this, mut cx| async move {
1212 let abs_path = abs_path
1213 .to_file_path()
1214 .map_err(|_| anyhow!("can't convert URI to path"))?;
1215 let (worktree, relative_path) = if let Some(result) =
1216 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1217 {
1218 result
1219 } else {
1220 let worktree = this
1221 .update(&mut cx, |this, cx| {
1222 this.create_local_worktree(&abs_path, false, cx)
1223 })
1224 .await?;
1225 this.update(&mut cx, |this, cx| {
1226 this.language_servers.insert(
1227 (worktree.read(cx).id(), lsp_adapter.name()),
1228 (lsp_adapter, lsp_server),
1229 );
1230 });
1231 (worktree, PathBuf::new())
1232 };
1233
1234 let project_path = ProjectPath {
1235 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1236 path: relative_path.into(),
1237 };
1238 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1239 .await
1240 })
1241 }
1242
1243 pub fn open_buffer_by_id(
1244 &mut self,
1245 id: u64,
1246 cx: &mut ModelContext<Self>,
1247 ) -> Task<Result<ModelHandle<Buffer>>> {
1248 if let Some(buffer) = self.buffer_for_id(id, cx) {
1249 Task::ready(Ok(buffer))
1250 } else if self.is_local() {
1251 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1252 } else if let Some(project_id) = self.remote_id() {
1253 let request = self
1254 .client
1255 .request(proto::OpenBufferById { project_id, id });
1256 cx.spawn(|this, mut cx| async move {
1257 let buffer = request
1258 .await?
1259 .buffer
1260 .ok_or_else(|| anyhow!("invalid buffer"))?;
1261 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1262 .await
1263 })
1264 } else {
1265 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1266 }
1267 }
1268
1269 pub fn save_buffer_as(
1270 &mut self,
1271 buffer: ModelHandle<Buffer>,
1272 abs_path: PathBuf,
1273 cx: &mut ModelContext<Project>,
1274 ) -> Task<Result<()>> {
1275 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1276 let old_path =
1277 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1278 cx.spawn(|this, mut cx| async move {
1279 if let Some(old_path) = old_path {
1280 this.update(&mut cx, |this, cx| {
1281 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1282 });
1283 }
1284 let (worktree, path) = worktree_task.await?;
1285 worktree
1286 .update(&mut cx, |worktree, cx| {
1287 worktree
1288 .as_local_mut()
1289 .unwrap()
1290 .save_buffer_as(buffer.clone(), path, cx)
1291 })
1292 .await?;
1293 this.update(&mut cx, |this, cx| {
1294 this.assign_language_to_buffer(&buffer, cx);
1295 this.register_buffer_with_language_server(&buffer, cx);
1296 });
1297 Ok(())
1298 })
1299 }
1300
1301 pub fn get_open_buffer(
1302 &mut self,
1303 path: &ProjectPath,
1304 cx: &mut ModelContext<Self>,
1305 ) -> Option<ModelHandle<Buffer>> {
1306 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1307 self.opened_buffers.values().find_map(|buffer| {
1308 let buffer = buffer.upgrade(cx)?;
1309 let file = File::from_dyn(buffer.read(cx).file())?;
1310 if file.worktree == worktree && file.path() == &path.path {
1311 Some(buffer)
1312 } else {
1313 None
1314 }
1315 })
1316 }
1317
1318 fn register_buffer(
1319 &mut self,
1320 buffer: &ModelHandle<Buffer>,
1321 cx: &mut ModelContext<Self>,
1322 ) -> Result<()> {
1323 let remote_id = buffer.read(cx).remote_id();
1324 let open_buffer = if self.is_remote() || self.is_shared() {
1325 OpenBuffer::Strong(buffer.clone())
1326 } else {
1327 OpenBuffer::Weak(buffer.downgrade())
1328 };
1329
1330 match self.opened_buffers.insert(remote_id, open_buffer) {
1331 None => {}
1332 Some(OpenBuffer::Loading(operations)) => {
1333 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1334 }
1335 Some(OpenBuffer::Weak(existing_handle)) => {
1336 if existing_handle.upgrade(cx).is_some() {
1337 Err(anyhow!(
1338 "already registered buffer with remote id {}",
1339 remote_id
1340 ))?
1341 }
1342 }
1343 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1344 "already registered buffer with remote id {}",
1345 remote_id
1346 ))?,
1347 }
1348 cx.subscribe(buffer, |this, buffer, event, cx| {
1349 this.on_buffer_event(buffer, event, cx);
1350 })
1351 .detach();
1352
1353 self.assign_language_to_buffer(buffer, cx);
1354 self.register_buffer_with_language_server(buffer, cx);
1355 cx.observe_release(buffer, |this, buffer, cx| {
1356 if let Some(file) = File::from_dyn(buffer.file()) {
1357 if file.is_local() {
1358 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1359 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1360 server
1361 .notify::<lsp::notification::DidCloseTextDocument>(
1362 lsp::DidCloseTextDocumentParams {
1363 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1364 },
1365 )
1366 .log_err();
1367 }
1368 }
1369 }
1370 })
1371 .detach();
1372
1373 Ok(())
1374 }
1375
1376 fn register_buffer_with_language_server(
1377 &mut self,
1378 buffer_handle: &ModelHandle<Buffer>,
1379 cx: &mut ModelContext<Self>,
1380 ) {
1381 let buffer = buffer_handle.read(cx);
1382 let buffer_id = buffer.remote_id();
1383 if let Some(file) = File::from_dyn(buffer.file()) {
1384 if file.is_local() {
1385 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1386 let initial_snapshot = buffer.text_snapshot();
1387
1388 let mut language_server = None;
1389 let mut language_id = None;
1390 if let Some(language) = buffer.language() {
1391 let worktree_id = file.worktree_id(cx);
1392 if let Some(adapter) = language.lsp_adapter() {
1393 language_id = adapter.id_for_language(language.name().as_ref());
1394 language_server = self
1395 .language_servers
1396 .get(&(worktree_id, adapter.name()))
1397 .cloned();
1398 }
1399 }
1400
1401 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1402 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1403 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1404 .log_err();
1405 }
1406 }
1407
1408 if let Some((_, server)) = language_server {
1409 server
1410 .notify::<lsp::notification::DidOpenTextDocument>(
1411 lsp::DidOpenTextDocumentParams {
1412 text_document: lsp::TextDocumentItem::new(
1413 uri,
1414 language_id.unwrap_or_default(),
1415 0,
1416 initial_snapshot.text(),
1417 ),
1418 }
1419 .clone(),
1420 )
1421 .log_err();
1422 buffer_handle.update(cx, |buffer, cx| {
1423 buffer.set_completion_triggers(
1424 server
1425 .capabilities()
1426 .completion_provider
1427 .as_ref()
1428 .and_then(|provider| provider.trigger_characters.clone())
1429 .unwrap_or(Vec::new()),
1430 cx,
1431 )
1432 });
1433 self.buffer_snapshots
1434 .insert(buffer_id, vec![(0, initial_snapshot)]);
1435 }
1436 }
1437 }
1438 }
1439
1440 fn unregister_buffer_from_language_server(
1441 &mut self,
1442 buffer: &ModelHandle<Buffer>,
1443 old_path: PathBuf,
1444 cx: &mut ModelContext<Self>,
1445 ) {
1446 buffer.update(cx, |buffer, cx| {
1447 buffer.update_diagnostics(Default::default(), cx);
1448 self.buffer_snapshots.remove(&buffer.remote_id());
1449 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1450 language_server
1451 .notify::<lsp::notification::DidCloseTextDocument>(
1452 lsp::DidCloseTextDocumentParams {
1453 text_document: lsp::TextDocumentIdentifier::new(
1454 lsp::Url::from_file_path(old_path).unwrap(),
1455 ),
1456 },
1457 )
1458 .log_err();
1459 }
1460 });
1461 }
1462
1463 fn on_buffer_event(
1464 &mut self,
1465 buffer: ModelHandle<Buffer>,
1466 event: &BufferEvent,
1467 cx: &mut ModelContext<Self>,
1468 ) -> Option<()> {
1469 match event {
1470 BufferEvent::Operation(operation) => {
1471 let project_id = self.remote_id()?;
1472 let request = self.client.request(proto::UpdateBuffer {
1473 project_id,
1474 buffer_id: buffer.read(cx).remote_id(),
1475 operations: vec![language::proto::serialize_operation(&operation)],
1476 });
1477 cx.background().spawn(request).detach_and_log_err(cx);
1478 }
1479 BufferEvent::Edited { .. } => {
1480 let (_, language_server) = self
1481 .language_server_for_buffer(buffer.read(cx), cx)?
1482 .clone();
1483 let buffer = buffer.read(cx);
1484 let file = File::from_dyn(buffer.file())?;
1485 let abs_path = file.as_local()?.abs_path(cx);
1486 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1487 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1488 let (version, prev_snapshot) = buffer_snapshots.last()?;
1489 let next_snapshot = buffer.text_snapshot();
1490 let next_version = version + 1;
1491
1492 let content_changes = buffer
1493 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1494 .map(|edit| {
1495 let edit_start = edit.new.start.0;
1496 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1497 let new_text = next_snapshot
1498 .text_for_range(edit.new.start.1..edit.new.end.1)
1499 .collect();
1500 lsp::TextDocumentContentChangeEvent {
1501 range: Some(lsp::Range::new(
1502 point_to_lsp(edit_start),
1503 point_to_lsp(edit_end),
1504 )),
1505 range_length: None,
1506 text: new_text,
1507 }
1508 })
1509 .collect();
1510
1511 buffer_snapshots.push((next_version, next_snapshot));
1512
1513 language_server
1514 .notify::<lsp::notification::DidChangeTextDocument>(
1515 lsp::DidChangeTextDocumentParams {
1516 text_document: lsp::VersionedTextDocumentIdentifier::new(
1517 uri,
1518 next_version,
1519 ),
1520 content_changes,
1521 },
1522 )
1523 .log_err();
1524 }
1525 BufferEvent::Saved => {
1526 let file = File::from_dyn(buffer.read(cx).file())?;
1527 let worktree_id = file.worktree_id(cx);
1528 let abs_path = file.as_local()?.abs_path(cx);
1529 let text_document = lsp::TextDocumentIdentifier {
1530 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1531 };
1532
1533 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1534 server
1535 .notify::<lsp::notification::DidSaveTextDocument>(
1536 lsp::DidSaveTextDocumentParams {
1537 text_document: text_document.clone(),
1538 text: None,
1539 },
1540 )
1541 .log_err();
1542 }
1543 }
1544 _ => {}
1545 }
1546
1547 None
1548 }
1549
1550 fn language_servers_for_worktree(
1551 &self,
1552 worktree_id: WorktreeId,
1553 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1554 self.language_servers.iter().filter_map(
1555 move |((language_server_worktree_id, _), server)| {
1556 if *language_server_worktree_id == worktree_id {
1557 Some(server)
1558 } else {
1559 None
1560 }
1561 },
1562 )
1563 }
1564
1565 fn assign_language_to_buffer(
1566 &mut self,
1567 buffer: &ModelHandle<Buffer>,
1568 cx: &mut ModelContext<Self>,
1569 ) -> Option<()> {
1570 // If the buffer has a language, set it and start the language server if we haven't already.
1571 let full_path = buffer.read(cx).file()?.full_path(cx);
1572 let language = self.languages.select_language(&full_path)?;
1573 buffer.update(cx, |buffer, cx| {
1574 buffer.set_language(Some(language.clone()), cx);
1575 });
1576
1577 let file = File::from_dyn(buffer.read(cx).file())?;
1578 let worktree = file.worktree.read(cx).as_local()?;
1579 let worktree_id = worktree.id();
1580 let worktree_abs_path = worktree.abs_path().clone();
1581 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1582
1583 None
1584 }
1585
1586 fn start_language_server(
1587 &mut self,
1588 worktree_id: WorktreeId,
1589 worktree_path: Arc<Path>,
1590 language: Arc<Language>,
1591 cx: &mut ModelContext<Self>,
1592 ) {
1593 let adapter = if let Some(adapter) = language.lsp_adapter() {
1594 adapter
1595 } else {
1596 return;
1597 };
1598 let key = (worktree_id, adapter.name());
1599 self.started_language_servers
1600 .entry(key.clone())
1601 .or_insert_with(|| {
1602 let server_id = post_inc(&mut self.next_language_server_id);
1603 let language_server = self.languages.start_language_server(
1604 server_id,
1605 language.clone(),
1606 worktree_path,
1607 self.client.http_client(),
1608 cx,
1609 );
1610 cx.spawn_weak(|this, mut cx| async move {
1611 let language_server = language_server?.await.log_err()?;
1612 let language_server = language_server
1613 .initialize(adapter.initialization_options())
1614 .await
1615 .log_err()?;
1616 let this = this.upgrade(&cx)?;
1617 let disk_based_diagnostics_progress_token =
1618 adapter.disk_based_diagnostics_progress_token();
1619
1620 language_server
1621 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1622 let this = this.downgrade();
1623 let adapter = adapter.clone();
1624 move |params, mut cx| {
1625 if let Some(this) = this.upgrade(&cx) {
1626 this.update(&mut cx, |this, cx| {
1627 this.on_lsp_diagnostics_published(
1628 server_id,
1629 params,
1630 &adapter,
1631 disk_based_diagnostics_progress_token,
1632 cx,
1633 );
1634 });
1635 }
1636 }
1637 })
1638 .detach();
1639
1640 language_server
1641 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1642 let settings = this
1643 .read_with(&cx, |this, _| this.language_server_settings.clone());
1644 move |params, _| {
1645 let settings = settings.lock().clone();
1646 async move {
1647 Ok(params
1648 .items
1649 .into_iter()
1650 .map(|item| {
1651 if let Some(section) = &item.section {
1652 settings
1653 .get(section)
1654 .cloned()
1655 .unwrap_or(serde_json::Value::Null)
1656 } else {
1657 settings.clone()
1658 }
1659 })
1660 .collect())
1661 }
1662 }
1663 })
1664 .detach();
1665
1666 language_server
1667 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1668 let this = this.downgrade();
1669 let adapter = adapter.clone();
1670 let language_server = language_server.clone();
1671 move |params, cx| {
1672 Self::on_lsp_workspace_edit(
1673 this,
1674 params,
1675 server_id,
1676 adapter.clone(),
1677 language_server.clone(),
1678 cx,
1679 )
1680 }
1681 })
1682 .detach();
1683
1684 language_server
1685 .on_notification::<lsp::notification::Progress, _>({
1686 let this = this.downgrade();
1687 move |params, mut cx| {
1688 if let Some(this) = this.upgrade(&cx) {
1689 this.update(&mut cx, |this, cx| {
1690 this.on_lsp_progress(
1691 params,
1692 server_id,
1693 disk_based_diagnostics_progress_token,
1694 cx,
1695 );
1696 });
1697 }
1698 }
1699 })
1700 .detach();
1701
1702 this.update(&mut cx, |this, cx| {
1703 this.language_servers
1704 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1705 this.language_server_statuses.insert(
1706 server_id,
1707 LanguageServerStatus {
1708 name: language_server.name().to_string(),
1709 pending_work: Default::default(),
1710 pending_diagnostic_updates: 0,
1711 },
1712 );
1713 language_server
1714 .notify::<lsp::notification::DidChangeConfiguration>(
1715 lsp::DidChangeConfigurationParams {
1716 settings: this.language_server_settings.lock().clone(),
1717 },
1718 )
1719 .ok();
1720
1721 if let Some(project_id) = this.remote_id() {
1722 this.client
1723 .send(proto::StartLanguageServer {
1724 project_id,
1725 server: Some(proto::LanguageServer {
1726 id: server_id as u64,
1727 name: language_server.name().to_string(),
1728 }),
1729 })
1730 .log_err();
1731 }
1732
1733 // Tell the language server about every open buffer in the worktree that matches the language.
1734 for buffer in this.opened_buffers.values() {
1735 if let Some(buffer_handle) = buffer.upgrade(cx) {
1736 let buffer = buffer_handle.read(cx);
1737 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1738 file
1739 } else {
1740 continue;
1741 };
1742 let language = if let Some(language) = buffer.language() {
1743 language
1744 } else {
1745 continue;
1746 };
1747 if file.worktree.read(cx).id() != key.0
1748 || language.lsp_adapter().map(|a| a.name())
1749 != Some(key.1.clone())
1750 {
1751 continue;
1752 }
1753
1754 let file = file.as_local()?;
1755 let versions = this
1756 .buffer_snapshots
1757 .entry(buffer.remote_id())
1758 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1759 let (version, initial_snapshot) = versions.last().unwrap();
1760 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1761 let language_id = adapter.id_for_language(language.name().as_ref());
1762 language_server
1763 .notify::<lsp::notification::DidOpenTextDocument>(
1764 lsp::DidOpenTextDocumentParams {
1765 text_document: lsp::TextDocumentItem::new(
1766 uri,
1767 language_id.unwrap_or_default(),
1768 *version,
1769 initial_snapshot.text(),
1770 ),
1771 },
1772 )
1773 .log_err()?;
1774 buffer_handle.update(cx, |buffer, cx| {
1775 buffer.set_completion_triggers(
1776 language_server
1777 .capabilities()
1778 .completion_provider
1779 .as_ref()
1780 .and_then(|provider| {
1781 provider.trigger_characters.clone()
1782 })
1783 .unwrap_or(Vec::new()),
1784 cx,
1785 )
1786 });
1787 }
1788 }
1789
1790 cx.notify();
1791 Some(())
1792 });
1793
1794 Some(language_server)
1795 })
1796 });
1797 }
1798
1799 pub fn restart_language_servers_for_buffers(
1800 &mut self,
1801 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1802 cx: &mut ModelContext<Self>,
1803 ) -> Option<()> {
1804 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1805 .into_iter()
1806 .filter_map(|buffer| {
1807 let file = File::from_dyn(buffer.read(cx).file())?;
1808 let worktree = file.worktree.read(cx).as_local()?;
1809 let worktree_id = worktree.id();
1810 let worktree_abs_path = worktree.abs_path().clone();
1811 let full_path = file.full_path(cx);
1812 Some((worktree_id, worktree_abs_path, full_path))
1813 })
1814 .collect();
1815 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1816 let language = self.languages.select_language(&full_path)?;
1817 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1818 }
1819
1820 None
1821 }
1822
1823 fn restart_language_server(
1824 &mut self,
1825 worktree_id: WorktreeId,
1826 worktree_path: Arc<Path>,
1827 language: Arc<Language>,
1828 cx: &mut ModelContext<Self>,
1829 ) {
1830 let adapter = if let Some(adapter) = language.lsp_adapter() {
1831 adapter
1832 } else {
1833 return;
1834 };
1835 let key = (worktree_id, adapter.name());
1836 let server_to_shutdown = self.language_servers.remove(&key);
1837 self.started_language_servers.remove(&key);
1838 server_to_shutdown
1839 .as_ref()
1840 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1841 cx.spawn_weak(|this, mut cx| async move {
1842 if let Some(this) = this.upgrade(&cx) {
1843 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1844 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1845 shutdown_task.await;
1846 }
1847 }
1848
1849 this.update(&mut cx, |this, cx| {
1850 this.start_language_server(worktree_id, worktree_path, language, cx);
1851 });
1852 }
1853 })
1854 .detach();
1855 }
1856
1857 fn on_lsp_diagnostics_published(
1858 &mut self,
1859 server_id: usize,
1860 mut params: lsp::PublishDiagnosticsParams,
1861 adapter: &Arc<dyn LspAdapter>,
1862 disk_based_diagnostics_progress_token: Option<&str>,
1863 cx: &mut ModelContext<Self>,
1864 ) {
1865 adapter.process_diagnostics(&mut params);
1866 if disk_based_diagnostics_progress_token.is_none() {
1867 self.disk_based_diagnostics_started(cx);
1868 self.broadcast_language_server_update(
1869 server_id,
1870 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1871 proto::LspDiskBasedDiagnosticsUpdating {},
1872 ),
1873 );
1874 }
1875 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1876 .log_err();
1877 if disk_based_diagnostics_progress_token.is_none() {
1878 self.disk_based_diagnostics_finished(cx);
1879 self.broadcast_language_server_update(
1880 server_id,
1881 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1882 proto::LspDiskBasedDiagnosticsUpdated {},
1883 ),
1884 );
1885 }
1886 }
1887
1888 fn on_lsp_progress(
1889 &mut self,
1890 progress: lsp::ProgressParams,
1891 server_id: usize,
1892 disk_based_diagnostics_progress_token: Option<&str>,
1893 cx: &mut ModelContext<Self>,
1894 ) {
1895 let token = match progress.token {
1896 lsp::NumberOrString::String(token) => token,
1897 lsp::NumberOrString::Number(token) => {
1898 log::info!("skipping numeric progress token {}", token);
1899 return;
1900 }
1901 };
1902 let progress = match progress.value {
1903 lsp::ProgressParamsValue::WorkDone(value) => value,
1904 };
1905 let language_server_status =
1906 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1907 status
1908 } else {
1909 return;
1910 };
1911 match progress {
1912 lsp::WorkDoneProgress::Begin(_) => {
1913 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1914 language_server_status.pending_diagnostic_updates += 1;
1915 if language_server_status.pending_diagnostic_updates == 1 {
1916 self.disk_based_diagnostics_started(cx);
1917 self.broadcast_language_server_update(
1918 server_id,
1919 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1920 proto::LspDiskBasedDiagnosticsUpdating {},
1921 ),
1922 );
1923 }
1924 } else {
1925 self.on_lsp_work_start(server_id, token.clone(), cx);
1926 self.broadcast_language_server_update(
1927 server_id,
1928 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1929 token,
1930 }),
1931 );
1932 }
1933 }
1934 lsp::WorkDoneProgress::Report(report) => {
1935 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1936 self.on_lsp_work_progress(
1937 server_id,
1938 token.clone(),
1939 LanguageServerProgress {
1940 message: report.message.clone(),
1941 percentage: report.percentage.map(|p| p as usize),
1942 last_update_at: Instant::now(),
1943 },
1944 cx,
1945 );
1946 self.broadcast_language_server_update(
1947 server_id,
1948 proto::update_language_server::Variant::WorkProgress(
1949 proto::LspWorkProgress {
1950 token,
1951 message: report.message,
1952 percentage: report.percentage.map(|p| p as u32),
1953 },
1954 ),
1955 );
1956 }
1957 }
1958 lsp::WorkDoneProgress::End(_) => {
1959 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1960 language_server_status.pending_diagnostic_updates -= 1;
1961 if language_server_status.pending_diagnostic_updates == 0 {
1962 self.disk_based_diagnostics_finished(cx);
1963 self.broadcast_language_server_update(
1964 server_id,
1965 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1966 proto::LspDiskBasedDiagnosticsUpdated {},
1967 ),
1968 );
1969 }
1970 } else {
1971 self.on_lsp_work_end(server_id, token.clone(), cx);
1972 self.broadcast_language_server_update(
1973 server_id,
1974 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1975 token,
1976 }),
1977 );
1978 }
1979 }
1980 }
1981 }
1982
1983 fn on_lsp_work_start(
1984 &mut self,
1985 language_server_id: usize,
1986 token: String,
1987 cx: &mut ModelContext<Self>,
1988 ) {
1989 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1990 status.pending_work.insert(
1991 token,
1992 LanguageServerProgress {
1993 message: None,
1994 percentage: None,
1995 last_update_at: Instant::now(),
1996 },
1997 );
1998 cx.notify();
1999 }
2000 }
2001
2002 fn on_lsp_work_progress(
2003 &mut self,
2004 language_server_id: usize,
2005 token: String,
2006 progress: LanguageServerProgress,
2007 cx: &mut ModelContext<Self>,
2008 ) {
2009 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2010 status.pending_work.insert(token, progress);
2011 cx.notify();
2012 }
2013 }
2014
2015 fn on_lsp_work_end(
2016 &mut self,
2017 language_server_id: usize,
2018 token: String,
2019 cx: &mut ModelContext<Self>,
2020 ) {
2021 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2022 status.pending_work.remove(&token);
2023 cx.notify();
2024 }
2025 }
2026
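    // Handle a `workspace/applyEdit` request from a language server: apply the edit
    // locally and remember the resulting transaction so that a subsequent
    // `workspace/executeCommand` round trip can return it.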
2027 async fn on_lsp_workspace_edit(
2028 this: WeakModelHandle<Self>,
2029 params: lsp::ApplyWorkspaceEditParams,
2030 server_id: usize,
2031 adapter: Arc<dyn LspAdapter>,
2032 language_server: Arc<LanguageServer>,
2033 mut cx: AsyncAppContext,
2034 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2035 let this = this
2036 .upgrade(&cx)
2037            .ok_or_else(|| anyhow!("project closed"))?;
2038 let transaction = Self::deserialize_workspace_edit(
2039 this.clone(),
2040 params.edit,
2041 true,
2042 adapter.clone(),
2043 language_server.clone(),
2044 &mut cx,
2045 )
2046 .await
2047 .log_err();
2048 this.update(&mut cx, |this, _| {
2049 if let Some(transaction) = transaction {
2050 this.last_workspace_edits_by_language_server
2051 .insert(server_id, transaction);
2052 }
2053 });
2054 Ok(lsp::ApplyWorkspaceEditResponse {
2055 applied: true,
2056 failed_change: None,
2057 failure_reason: None,
2058 })
2059 }
2060
2061 fn broadcast_language_server_update(
2062 &self,
2063 language_server_id: usize,
2064 event: proto::update_language_server::Variant,
2065 ) {
2066 if let Some(project_id) = self.remote_id() {
2067 self.client
2068 .send(proto::UpdateLanguageServer {
2069 project_id,
2070 language_server_id: language_server_id as u64,
2071 variant: Some(event),
2072 })
2073 .log_err();
2074 }
2075 }
2076
2077 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2078 for (_, server) in self.language_servers.values() {
2079 server
2080 .notify::<lsp::notification::DidChangeConfiguration>(
2081 lsp::DidChangeConfigurationParams {
2082 settings: settings.clone(),
2083 },
2084 )
2085 .ok();
2086 }
2087 *self.language_server_settings.lock() = settings;
2088 }
2089
2090 pub fn language_server_statuses(
2091 &self,
2092 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2093 self.language_server_statuses.values()
2094 }
2095
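    // Convert an LSP diagnostics payload into grouped `DiagnosticEntry` values:
    // each primary diagnostic starts a group, its related information becomes
    // secondary entries in that group, and "supporting" diagnostics only refine
    // the severity and unnecessary flags of the primaries they point back to.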
2096 pub fn update_diagnostics(
2097 &mut self,
2098 params: lsp::PublishDiagnosticsParams,
2099 disk_based_sources: &[&str],
2100 cx: &mut ModelContext<Self>,
2101 ) -> Result<()> {
2102 let abs_path = params
2103 .uri
2104 .to_file_path()
2105 .map_err(|_| anyhow!("URI is not a file"))?;
2106 let mut next_group_id = 0;
2107 let mut diagnostics = Vec::default();
2108 let mut primary_diagnostic_group_ids = HashMap::default();
2109 let mut sources_by_group_id = HashMap::default();
2110 let mut supporting_diagnostics = HashMap::default();
2111        for diagnostic in &params.diagnostics {
2112 let source = diagnostic.source.as_ref();
2113 let code = diagnostic.code.as_ref().map(|code| match code {
2114 lsp::NumberOrString::Number(code) => code.to_string(),
2115 lsp::NumberOrString::String(code) => code.clone(),
2116 });
2117 let range = range_from_lsp(diagnostic.range);
2118 let is_supporting = diagnostic
2119 .related_information
2120 .as_ref()
2121 .map_or(false, |infos| {
2122 infos.iter().any(|info| {
2123 primary_diagnostic_group_ids.contains_key(&(
2124 source,
2125 code.clone(),
2126 range_from_lsp(info.location.range),
2127 ))
2128 })
2129 });
2130
2131 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2132 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2133 });
2134
2135 if is_supporting {
2136 supporting_diagnostics.insert(
2137 (source, code.clone(), range),
2138 (diagnostic.severity, is_unnecessary),
2139 );
2140 } else {
2141 let group_id = post_inc(&mut next_group_id);
2142 let is_disk_based = source.map_or(false, |source| {
2143 disk_based_sources.contains(&source.as_str())
2144 });
2145
2146 sources_by_group_id.insert(group_id, source);
2147 primary_diagnostic_group_ids
2148 .insert((source, code.clone(), range.clone()), group_id);
2149
2150 diagnostics.push(DiagnosticEntry {
2151 range,
2152 diagnostic: Diagnostic {
2153 code: code.clone(),
2154 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2155 message: diagnostic.message.clone(),
2156 group_id,
2157 is_primary: true,
2158 is_valid: true,
2159 is_disk_based,
2160 is_unnecessary,
2161 },
2162 });
2163 if let Some(infos) = &diagnostic.related_information {
2164 for info in infos {
2165 if info.location.uri == params.uri && !info.message.is_empty() {
2166 let range = range_from_lsp(info.location.range);
2167 diagnostics.push(DiagnosticEntry {
2168 range,
2169 diagnostic: Diagnostic {
2170 code: code.clone(),
2171 severity: DiagnosticSeverity::INFORMATION,
2172 message: info.message.clone(),
2173 group_id,
2174 is_primary: false,
2175 is_valid: true,
2176 is_disk_based,
2177 is_unnecessary: false,
2178 },
2179 });
2180 }
2181 }
2182 }
2183 }
2184 }
2185
2186 for entry in &mut diagnostics {
2187 let diagnostic = &mut entry.diagnostic;
2188 if !diagnostic.is_primary {
2189 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2190 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2191 source,
2192 diagnostic.code.clone(),
2193 entry.range.clone(),
2194 )) {
2195 if let Some(severity) = severity {
2196 diagnostic.severity = severity;
2197 }
2198 diagnostic.is_unnecessary = is_unnecessary;
2199 }
2200 }
2201 }
2202
2203 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2204 Ok(())
2205 }
2206
2207 pub fn update_diagnostic_entries(
2208 &mut self,
2209 abs_path: PathBuf,
2210 version: Option<i32>,
2211 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2212 cx: &mut ModelContext<Project>,
2213 ) -> Result<(), anyhow::Error> {
2214 let (worktree, relative_path) = self
2215 .find_local_worktree(&abs_path, cx)
2216 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2217 if !worktree.read(cx).is_visible() {
2218 return Ok(());
2219 }
2220
2221 let project_path = ProjectPath {
2222 worktree_id: worktree.read(cx).id(),
2223 path: relative_path.into(),
2224 };
2225 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2226 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2227 }
2228
2229 let updated = worktree.update(cx, |worktree, cx| {
2230 worktree
2231 .as_local_mut()
2232 .ok_or_else(|| anyhow!("not a local worktree"))?
2233 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2234 })?;
2235 if updated {
2236 cx.emit(Event::DiagnosticsUpdated(project_path));
2237 }
2238 Ok(())
2239 }
2240
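    // Apply diagnostics to an open buffer, clipping them to the buffer's contents.
    // Disk-based diagnostics describe the file as saved on disk, so their ranges are
    // first mapped through any unsaved edits; empty ranges are widened to cover at
    // least one character.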
2241 fn update_buffer_diagnostics(
2242 &mut self,
2243 buffer: &ModelHandle<Buffer>,
2244 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2245 version: Option<i32>,
2246 cx: &mut ModelContext<Self>,
2247 ) -> Result<()> {
2248 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2249 Ordering::Equal
2250 .then_with(|| b.is_primary.cmp(&a.is_primary))
2251 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2252 .then_with(|| a.severity.cmp(&b.severity))
2253 .then_with(|| a.message.cmp(&b.message))
2254 }
2255
2256 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2257
2258 diagnostics.sort_unstable_by(|a, b| {
2259 Ordering::Equal
2260 .then_with(|| a.range.start.cmp(&b.range.start))
2261 .then_with(|| b.range.end.cmp(&a.range.end))
2262 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2263 });
2264
2265 let mut sanitized_diagnostics = Vec::new();
2266 let edits_since_save = Patch::new(
2267 snapshot
2268 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2269 .collect(),
2270 );
2271 for entry in diagnostics {
2272 let start;
2273 let end;
2274 if entry.diagnostic.is_disk_based {
2275 // Some diagnostics are based on files on disk instead of buffers'
2276 // current contents. Adjust these diagnostics' ranges to reflect
2277 // any unsaved edits.
2278 start = edits_since_save.old_to_new(entry.range.start);
2279 end = edits_since_save.old_to_new(entry.range.end);
2280 } else {
2281 start = entry.range.start;
2282 end = entry.range.end;
2283 }
2284
2285 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2286 ..snapshot.clip_point_utf16(end, Bias::Right);
2287
2288 // Expand empty ranges by one character
2289 if range.start == range.end {
2290 range.end.column += 1;
2291 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2292 if range.start == range.end && range.end.column > 0 {
2293 range.start.column -= 1;
2294 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2295 }
2296 }
2297
2298 sanitized_diagnostics.push(DiagnosticEntry {
2299 range,
2300 diagnostic: entry.diagnostic,
2301 });
2302 }
2303 drop(edits_since_save);
2304
2305 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2306 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2307 Ok(())
2308 }
2309
2310 pub fn reload_buffers(
2311 &self,
2312 buffers: HashSet<ModelHandle<Buffer>>,
2313 push_to_history: bool,
2314 cx: &mut ModelContext<Self>,
2315 ) -> Task<Result<ProjectTransaction>> {
2316 let mut local_buffers = Vec::new();
2317 let mut remote_buffers = None;
2318 for buffer_handle in buffers {
2319 let buffer = buffer_handle.read(cx);
2320 if buffer.is_dirty() {
2321 if let Some(file) = File::from_dyn(buffer.file()) {
2322 if file.is_local() {
2323 local_buffers.push(buffer_handle);
2324 } else {
2325 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2326 }
2327 }
2328 }
2329 }
2330
2331 let remote_buffers = self.remote_id().zip(remote_buffers);
2332 let client = self.client.clone();
2333
2334 cx.spawn(|this, mut cx| async move {
2335 let mut project_transaction = ProjectTransaction::default();
2336
2337 if let Some((project_id, remote_buffers)) = remote_buffers {
2338 let response = client
2339 .request(proto::ReloadBuffers {
2340 project_id,
2341 buffer_ids: remote_buffers
2342 .iter()
2343 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2344 .collect(),
2345 })
2346 .await?
2347 .transaction
2348 .ok_or_else(|| anyhow!("missing transaction"))?;
2349 project_transaction = this
2350 .update(&mut cx, |this, cx| {
2351 this.deserialize_project_transaction(response, push_to_history, cx)
2352 })
2353 .await?;
2354 }
2355
2356 for buffer in local_buffers {
2357 let transaction = buffer
2358 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2359 .await?;
2360 buffer.update(&mut cx, |buffer, cx| {
2361 if let Some(transaction) = transaction {
2362 if !push_to_history {
2363 buffer.forget_transaction(transaction.id);
2364 }
2365 project_transaction.0.insert(cx.handle(), transaction);
2366 }
2367 });
2368 }
2369
2370 Ok(project_transaction)
2371 })
2372 }
2373
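    // Format a set of buffers. Remote buffers are formatted by the host via RPC;
    // local buffers are sent to their language server, preferring whole-document
    // formatting and falling back to range formatting over the entire buffer.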
2374 pub fn format(
2375 &self,
2376 buffers: HashSet<ModelHandle<Buffer>>,
2377 push_to_history: bool,
2378 cx: &mut ModelContext<Project>,
2379 ) -> Task<Result<ProjectTransaction>> {
2380 let mut local_buffers = Vec::new();
2381 let mut remote_buffers = None;
2382 for buffer_handle in buffers {
2383 let buffer = buffer_handle.read(cx);
2384 if let Some(file) = File::from_dyn(buffer.file()) {
2385 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2386 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2387 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2388 }
2389 } else {
2390 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2391 }
2392 } else {
2393 return Task::ready(Ok(Default::default()));
2394 }
2395 }
2396
2397 let remote_buffers = self.remote_id().zip(remote_buffers);
2398 let client = self.client.clone();
2399
2400 cx.spawn(|this, mut cx| async move {
2401 let mut project_transaction = ProjectTransaction::default();
2402
2403 if let Some((project_id, remote_buffers)) = remote_buffers {
2404 let response = client
2405 .request(proto::FormatBuffers {
2406 project_id,
2407 buffer_ids: remote_buffers
2408 .iter()
2409 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2410 .collect(),
2411 })
2412 .await?
2413 .transaction
2414 .ok_or_else(|| anyhow!("missing transaction"))?;
2415 project_transaction = this
2416 .update(&mut cx, |this, cx| {
2417 this.deserialize_project_transaction(response, push_to_history, cx)
2418 })
2419 .await?;
2420 }
2421
2422 for (buffer, buffer_abs_path, language_server) in local_buffers {
2423 let text_document = lsp::TextDocumentIdentifier::new(
2424 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2425 );
2426 let capabilities = &language_server.capabilities();
2427 let tab_size = cx.update(|cx| {
2428 let language_name = buffer.read(cx).language().map(|language| language.name());
2429 cx.global::<Settings>().tab_size(language_name.as_deref())
2430 });
2431 let lsp_edits = if capabilities
2432 .document_formatting_provider
2433 .as_ref()
2434 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2435 {
2436 language_server
2437 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2438 text_document,
2439 options: lsp::FormattingOptions {
2440 tab_size,
2441 insert_spaces: true,
2442 insert_final_newline: Some(true),
2443 ..Default::default()
2444 },
2445 work_done_progress_params: Default::default(),
2446 })
2447 .await?
2448 } else if capabilities
2449 .document_range_formatting_provider
2450 .as_ref()
2451 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2452 {
2453 let buffer_start = lsp::Position::new(0, 0);
2454 let buffer_end =
2455 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2456 language_server
2457 .request::<lsp::request::RangeFormatting>(
2458 lsp::DocumentRangeFormattingParams {
2459 text_document,
2460 range: lsp::Range::new(buffer_start, buffer_end),
2461 options: lsp::FormattingOptions {
2462                                    tab_size,
2463 insert_spaces: true,
2464 insert_final_newline: Some(true),
2465 ..Default::default()
2466 },
2467 work_done_progress_params: Default::default(),
2468 },
2469 )
2470 .await?
2471 } else {
2472 continue;
2473 };
2474
2475 if let Some(lsp_edits) = lsp_edits {
2476 let edits = this
2477 .update(&mut cx, |this, cx| {
2478 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2479 })
2480 .await?;
2481 buffer.update(&mut cx, |buffer, cx| {
2482 buffer.finalize_last_transaction();
2483 buffer.start_transaction();
2484 for (range, text) in edits {
2485 buffer.edit([(range, text)], cx);
2486 }
2487 if buffer.end_transaction(cx).is_some() {
2488 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2489 if !push_to_history {
2490 buffer.forget_transaction(transaction.id);
2491 }
2492 project_transaction.0.insert(cx.handle(), transaction);
2493 }
2494 });
2495 }
2496 }
2497
2498 Ok(project_transaction)
2499 })
2500 }
2501
2502 pub fn definition<T: ToPointUtf16>(
2503 &self,
2504 buffer: &ModelHandle<Buffer>,
2505 position: T,
2506 cx: &mut ModelContext<Self>,
2507 ) -> Task<Result<Vec<Location>>> {
2508 let position = position.to_point_utf16(buffer.read(cx));
2509 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2510 }
2511
2512 pub fn references<T: ToPointUtf16>(
2513 &self,
2514 buffer: &ModelHandle<Buffer>,
2515 position: T,
2516 cx: &mut ModelContext<Self>,
2517 ) -> Task<Result<Vec<Location>>> {
2518 let position = position.to_point_utf16(buffer.read(cx));
2519 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2520 }
2521
2522 pub fn document_highlights<T: ToPointUtf16>(
2523 &self,
2524 buffer: &ModelHandle<Buffer>,
2525 position: T,
2526 cx: &mut ModelContext<Self>,
2527 ) -> Task<Result<Vec<DocumentHighlight>>> {
2528 let position = position.to_point_utf16(buffer.read(cx));
2529
2530 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2531 }
2532
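    // Query workspace symbols from every running language server (or from the host
    // when this is a remote project) and resolve each result to a path within a
    // local worktree when possible.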
2533 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2534 if self.is_local() {
2535 let mut requests = Vec::new();
2536 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2537 let worktree_id = *worktree_id;
2538 if let Some(worktree) = self
2539 .worktree_for_id(worktree_id, cx)
2540 .and_then(|worktree| worktree.read(cx).as_local())
2541 {
2542 let lsp_adapter = lsp_adapter.clone();
2543 let worktree_abs_path = worktree.abs_path().clone();
2544 requests.push(
2545 language_server
2546 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2547 query: query.to_string(),
2548 ..Default::default()
2549 })
2550 .log_err()
2551 .map(move |response| {
2552 (
2553 lsp_adapter,
2554 worktree_id,
2555 worktree_abs_path,
2556 response.unwrap_or_default(),
2557 )
2558 }),
2559 );
2560 }
2561 }
2562
2563 cx.spawn_weak(|this, cx| async move {
2564 let responses = futures::future::join_all(requests).await;
2565 let this = if let Some(this) = this.upgrade(&cx) {
2566 this
2567 } else {
2568 return Ok(Default::default());
2569 };
2570 this.read_with(&cx, |this, cx| {
2571 let mut symbols = Vec::new();
2572 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2573 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2574 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2575 let mut worktree_id = source_worktree_id;
2576 let path;
2577 if let Some((worktree, rel_path)) =
2578 this.find_local_worktree(&abs_path, cx)
2579 {
2580 worktree_id = worktree.read(cx).id();
2581 path = rel_path;
2582 } else {
2583 path = relativize_path(&worktree_abs_path, &abs_path);
2584 }
2585
2586 let label = this
2587 .languages
2588 .select_language(&path)
2589 .and_then(|language| {
2590 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2591 })
2592 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2593 let signature = this.symbol_signature(worktree_id, &path);
2594
2595 Some(Symbol {
2596 source_worktree_id,
2597 worktree_id,
2598 language_server_name: adapter.name(),
2599 name: lsp_symbol.name,
2600 kind: lsp_symbol.kind,
2601 label,
2602 path,
2603 range: range_from_lsp(lsp_symbol.location.range),
2604 signature,
2605 })
2606 }));
2607 }
2608 Ok(symbols)
2609 })
2610 })
2611 } else if let Some(project_id) = self.remote_id() {
2612 let request = self.client.request(proto::GetProjectSymbols {
2613 project_id,
2614 query: query.to_string(),
2615 });
2616 cx.spawn_weak(|this, cx| async move {
2617 let response = request.await?;
2618 let mut symbols = Vec::new();
2619 if let Some(this) = this.upgrade(&cx) {
2620 this.read_with(&cx, |this, _| {
2621 symbols.extend(
2622 response
2623 .symbols
2624 .into_iter()
2625 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2626 );
2627 })
2628 }
2629 Ok(symbols)
2630 })
2631 } else {
2632 Task::ready(Ok(Default::default()))
2633 }
2634 }
2635
2636 pub fn open_buffer_for_symbol(
2637 &mut self,
2638 symbol: &Symbol,
2639 cx: &mut ModelContext<Self>,
2640 ) -> Task<Result<ModelHandle<Buffer>>> {
2641 if self.is_local() {
2642 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2643 symbol.source_worktree_id,
2644 symbol.language_server_name.clone(),
2645 )) {
2646 server.clone()
2647 } else {
2648 return Task::ready(Err(anyhow!(
2649 "language server for worktree and language not found"
2650 )));
2651 };
2652
2653 let worktree_abs_path = if let Some(worktree_abs_path) = self
2654 .worktree_for_id(symbol.worktree_id, cx)
2655 .and_then(|worktree| worktree.read(cx).as_local())
2656 .map(|local_worktree| local_worktree.abs_path())
2657 {
2658 worktree_abs_path
2659 } else {
2660 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2661 };
2662 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2663 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2664 uri
2665 } else {
2666 return Task::ready(Err(anyhow!("invalid symbol path")));
2667 };
2668
2669 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2670 } else if let Some(project_id) = self.remote_id() {
2671 let request = self.client.request(proto::OpenBufferForSymbol {
2672 project_id,
2673 symbol: Some(serialize_symbol(symbol)),
2674 });
2675 cx.spawn(|this, mut cx| async move {
2676 let response = request.await?;
2677 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2678 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2679 .await
2680 })
2681 } else {
2682 Task::ready(Err(anyhow!("project does not have a remote id")))
2683 }
2684 }
2685
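    // Request completions at a position. For local buffers the language server is
    // queried directly and each completion's replacement range is validated (or
    // inferred from the word under the cursor); for remote buffers the request is
    // proxied to the host.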
2686 pub fn completions<T: ToPointUtf16>(
2687 &self,
2688 source_buffer_handle: &ModelHandle<Buffer>,
2689 position: T,
2690 cx: &mut ModelContext<Self>,
2691 ) -> Task<Result<Vec<Completion>>> {
2692 let source_buffer_handle = source_buffer_handle.clone();
2693 let source_buffer = source_buffer_handle.read(cx);
2694 let buffer_id = source_buffer.remote_id();
2695 let language = source_buffer.language().cloned();
2696 let worktree;
2697 let buffer_abs_path;
2698 if let Some(file) = File::from_dyn(source_buffer.file()) {
2699 worktree = file.worktree.clone();
2700 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2701 } else {
2702 return Task::ready(Ok(Default::default()));
2703 };
2704
2705 let position = position.to_point_utf16(source_buffer);
2706 let anchor = source_buffer.anchor_after(position);
2707
2708 if worktree.read(cx).as_local().is_some() {
2709 let buffer_abs_path = buffer_abs_path.unwrap();
2710 let (_, lang_server) =
2711 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2712 server.clone()
2713 } else {
2714 return Task::ready(Ok(Default::default()));
2715 };
2716
2717 cx.spawn(|_, cx| async move {
2718 let completions = lang_server
2719 .request::<lsp::request::Completion>(lsp::CompletionParams {
2720 text_document_position: lsp::TextDocumentPositionParams::new(
2721 lsp::TextDocumentIdentifier::new(
2722 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2723 ),
2724 point_to_lsp(position),
2725 ),
2726 context: Default::default(),
2727 work_done_progress_params: Default::default(),
2728 partial_result_params: Default::default(),
2729 })
2730 .await
2731 .context("lsp completion request failed")?;
2732
2733 let completions = if let Some(completions) = completions {
2734 match completions {
2735 lsp::CompletionResponse::Array(completions) => completions,
2736 lsp::CompletionResponse::List(list) => list.items,
2737 }
2738 } else {
2739 Default::default()
2740 };
2741
2742 source_buffer_handle.read_with(&cx, |this, _| {
2743 let snapshot = this.snapshot();
2744 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2745 let mut range_for_token = None;
2746 Ok(completions
2747 .into_iter()
2748 .filter_map(|lsp_completion| {
2749 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2750 // If the language server provides a range to overwrite, then
2751 // check that the range is valid.
2752 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2753 let range = range_from_lsp(edit.range);
2754 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2755 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2756 if start != range.start || end != range.end {
2757 log::info!("completion out of expected range");
2758 return None;
2759 }
2760 (
2761 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2762 edit.new_text.clone(),
2763 )
2764 }
2765 // If the language server does not provide a range, then infer
2766 // the range based on the syntax tree.
2767 None => {
2768 if position != clipped_position {
2769 log::info!("completion out of expected range");
2770 return None;
2771 }
2772 let Range { start, end } = range_for_token
2773 .get_or_insert_with(|| {
2774 let offset = position.to_offset(&snapshot);
2775 snapshot
2776 .range_for_word_token_at(offset)
2777 .unwrap_or_else(|| offset..offset)
2778 })
2779 .clone();
2780 let text = lsp_completion
2781 .insert_text
2782 .as_ref()
2783 .unwrap_or(&lsp_completion.label)
2784 .clone();
2785 (
2786 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2787 text.clone(),
2788 )
2789 }
2790 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2791 log::info!("unsupported insert/replace completion");
2792 return None;
2793 }
2794 };
2795
2796 Some(Completion {
2797 old_range,
2798 new_text,
2799 label: language
2800 .as_ref()
2801 .and_then(|l| l.label_for_completion(&lsp_completion))
2802 .unwrap_or_else(|| {
2803 CodeLabel::plain(
2804 lsp_completion.label.clone(),
2805 lsp_completion.filter_text.as_deref(),
2806 )
2807 }),
2808 lsp_completion,
2809 })
2810 })
2811 .collect())
2812 })
2813 })
2814 } else if let Some(project_id) = self.remote_id() {
2815 let rpc = self.client.clone();
2816 let message = proto::GetCompletions {
2817 project_id,
2818 buffer_id,
2819 position: Some(language::proto::serialize_anchor(&anchor)),
2820 version: serialize_version(&source_buffer.version()),
2821 };
2822 cx.spawn_weak(|_, mut cx| async move {
2823 let response = rpc.request(message).await?;
2824
2825 source_buffer_handle
2826 .update(&mut cx, |buffer, _| {
2827 buffer.wait_for_version(deserialize_version(response.version))
2828 })
2829 .await;
2830
2831 response
2832 .completions
2833 .into_iter()
2834 .map(|completion| {
2835 language::proto::deserialize_completion(completion, language.as_ref())
2836 })
2837 .collect()
2838 })
2839 } else {
2840 Task::ready(Ok(Default::default()))
2841 }
2842 }
2843
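    // Resolve a completion item and apply any `additionalTextEdits` it carries
    // (e.g. auto-inserted imports), returning the transaction that produced them.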
2844 pub fn apply_additional_edits_for_completion(
2845 &self,
2846 buffer_handle: ModelHandle<Buffer>,
2847 completion: Completion,
2848 push_to_history: bool,
2849 cx: &mut ModelContext<Self>,
2850 ) -> Task<Result<Option<Transaction>>> {
2851 let buffer = buffer_handle.read(cx);
2852 let buffer_id = buffer.remote_id();
2853
2854 if self.is_local() {
2855 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2856 {
2857 server.clone()
2858 } else {
2859 return Task::ready(Ok(Default::default()));
2860 };
2861
2862 cx.spawn(|this, mut cx| async move {
2863 let resolved_completion = lang_server
2864 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2865 .await?;
2866 if let Some(edits) = resolved_completion.additional_text_edits {
2867 let edits = this
2868 .update(&mut cx, |this, cx| {
2869 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2870 })
2871 .await?;
2872 buffer_handle.update(&mut cx, |buffer, cx| {
2873 buffer.finalize_last_transaction();
2874 buffer.start_transaction();
2875 for (range, text) in edits {
2876 buffer.edit([(range, text)], cx);
2877 }
2878 let transaction = if buffer.end_transaction(cx).is_some() {
2879 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2880 if !push_to_history {
2881 buffer.forget_transaction(transaction.id);
2882 }
2883 Some(transaction)
2884 } else {
2885 None
2886 };
2887 Ok(transaction)
2888 })
2889 } else {
2890 Ok(None)
2891 }
2892 })
2893 } else if let Some(project_id) = self.remote_id() {
2894 let client = self.client.clone();
2895 cx.spawn(|_, mut cx| async move {
2896 let response = client
2897 .request(proto::ApplyCompletionAdditionalEdits {
2898 project_id,
2899 buffer_id,
2900 completion: Some(language::proto::serialize_completion(&completion)),
2901 })
2902 .await?;
2903
2904 if let Some(transaction) = response.transaction {
2905 let transaction = language::proto::deserialize_transaction(transaction)?;
2906 buffer_handle
2907 .update(&mut cx, |buffer, _| {
2908 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2909 })
2910 .await;
2911 if push_to_history {
2912 buffer_handle.update(&mut cx, |buffer, _| {
2913 buffer.push_transaction(transaction.clone(), Instant::now());
2914 });
2915 }
2916 Ok(Some(transaction))
2917 } else {
2918 Ok(None)
2919 }
2920 })
2921 } else {
2922 Task::ready(Err(anyhow!("project does not have a remote id")))
2923 }
2924 }
2925
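    // Fetch code actions for a range, passing along the diagnostics that overlap it
    // so the server can offer quickfixes.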
2926 pub fn code_actions<T: Clone + ToOffset>(
2927 &self,
2928 buffer_handle: &ModelHandle<Buffer>,
2929 range: Range<T>,
2930 cx: &mut ModelContext<Self>,
2931 ) -> Task<Result<Vec<CodeAction>>> {
2932 let buffer_handle = buffer_handle.clone();
2933 let buffer = buffer_handle.read(cx);
2934 let snapshot = buffer.snapshot();
2935 let relevant_diagnostics = snapshot
2936 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2937 .map(|entry| entry.to_lsp_diagnostic_stub())
2938 .collect();
2939 let buffer_id = buffer.remote_id();
2940 let worktree;
2941 let buffer_abs_path;
2942 if let Some(file) = File::from_dyn(buffer.file()) {
2943 worktree = file.worktree.clone();
2944 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2945 } else {
2946 return Task::ready(Ok(Default::default()));
2947 };
2948 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2949
2950 if worktree.read(cx).as_local().is_some() {
2951 let buffer_abs_path = buffer_abs_path.unwrap();
2952 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2953 {
2954 server.clone()
2955 } else {
2956 return Task::ready(Ok(Default::default()));
2957 };
2958
2959 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2960 cx.foreground().spawn(async move {
2961                if lang_server.capabilities().code_action_provider.is_none() {
2962 return Ok(Default::default());
2963 }
2964
2965 Ok(lang_server
2966 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2967 text_document: lsp::TextDocumentIdentifier::new(
2968 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2969 ),
2970 range: lsp_range,
2971 work_done_progress_params: Default::default(),
2972 partial_result_params: Default::default(),
2973 context: lsp::CodeActionContext {
2974 diagnostics: relevant_diagnostics,
2975 only: Some(vec![
2976 lsp::CodeActionKind::QUICKFIX,
2977 lsp::CodeActionKind::REFACTOR,
2978 lsp::CodeActionKind::REFACTOR_EXTRACT,
2979 lsp::CodeActionKind::SOURCE,
2980 ]),
2981 },
2982 })
2983 .await?
2984 .unwrap_or_default()
2985 .into_iter()
2986 .filter_map(|entry| {
2987 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2988 Some(CodeAction {
2989 range: range.clone(),
2990 lsp_action,
2991 })
2992 } else {
2993 None
2994 }
2995 })
2996 .collect())
2997 })
2998 } else if let Some(project_id) = self.remote_id() {
2999 let rpc = self.client.clone();
3000 let version = buffer.version();
3001 cx.spawn_weak(|_, mut cx| async move {
3002 let response = rpc
3003 .request(proto::GetCodeActions {
3004 project_id,
3005 buffer_id,
3006 start: Some(language::proto::serialize_anchor(&range.start)),
3007 end: Some(language::proto::serialize_anchor(&range.end)),
3008 version: serialize_version(&version),
3009 })
3010 .await?;
3011
3012 buffer_handle
3013 .update(&mut cx, |buffer, _| {
3014 buffer.wait_for_version(deserialize_version(response.version))
3015 })
3016 .await;
3017
3018 response
3019 .actions
3020 .into_iter()
3021 .map(language::proto::deserialize_code_action)
3022 .collect()
3023 })
3024 } else {
3025 Task::ready(Ok(Default::default()))
3026 }
3027 }
3028
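    // Apply a code action: re-resolve it (or re-request it when the server gave no
    // resolve data), then either apply its workspace edit or execute its command and
    // collect whatever edits the server sends back via `workspace/applyEdit`.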
3029 pub fn apply_code_action(
3030 &self,
3031 buffer_handle: ModelHandle<Buffer>,
3032 mut action: CodeAction,
3033 push_to_history: bool,
3034 cx: &mut ModelContext<Self>,
3035 ) -> Task<Result<ProjectTransaction>> {
3036 if self.is_local() {
3037 let buffer = buffer_handle.read(cx);
3038 let (lsp_adapter, lang_server) =
3039 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3040 server.clone()
3041 } else {
3042 return Task::ready(Ok(Default::default()));
3043 };
3044 let range = action.range.to_point_utf16(buffer);
3045
3046 cx.spawn(|this, mut cx| async move {
3047 if let Some(lsp_range) = action
3048 .lsp_action
3049 .data
3050 .as_mut()
3051 .and_then(|d| d.get_mut("codeActionParams"))
3052 .and_then(|d| d.get_mut("range"))
3053 {
3054 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3055 action.lsp_action = lang_server
3056 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3057 .await?;
3058 } else {
3059 let actions = this
3060 .update(&mut cx, |this, cx| {
3061 this.code_actions(&buffer_handle, action.range, cx)
3062 })
3063 .await?;
3064 action.lsp_action = actions
3065 .into_iter()
3066 .find(|a| a.lsp_action.title == action.lsp_action.title)
3067 .ok_or_else(|| anyhow!("code action is outdated"))?
3068 .lsp_action;
3069 }
3070
3071 if let Some(edit) = action.lsp_action.edit {
3072 Self::deserialize_workspace_edit(
3073 this,
3074 edit,
3075 push_to_history,
3076 lsp_adapter,
3077 lang_server,
3078 &mut cx,
3079 )
3080 .await
3081 } else if let Some(command) = action.lsp_action.command {
3082 this.update(&mut cx, |this, _| {
3083 this.last_workspace_edits_by_language_server
3084 .remove(&lang_server.server_id());
3085 });
3086 lang_server
3087 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3088 command: command.command,
3089 arguments: command.arguments.unwrap_or_default(),
3090 ..Default::default()
3091 })
3092 .await?;
3093 Ok(this.update(&mut cx, |this, _| {
3094 this.last_workspace_edits_by_language_server
3095 .remove(&lang_server.server_id())
3096 .unwrap_or_default()
3097 }))
3098 } else {
3099 Ok(ProjectTransaction::default())
3100 }
3101 })
3102 } else if let Some(project_id) = self.remote_id() {
3103 let client = self.client.clone();
3104 let request = proto::ApplyCodeAction {
3105 project_id,
3106 buffer_id: buffer_handle.read(cx).remote_id(),
3107 action: Some(language::proto::serialize_code_action(&action)),
3108 };
3109 cx.spawn(|this, mut cx| async move {
3110 let response = client
3111 .request(request)
3112 .await?
3113 .transaction
3114 .ok_or_else(|| anyhow!("missing transaction"))?;
3115 this.update(&mut cx, |this, cx| {
3116 this.deserialize_project_transaction(response, push_to_history, cx)
3117 })
3118 .await
3119 })
3120 } else {
3121 Task::ready(Err(anyhow!("project does not have a remote id")))
3122 }
3123 }
3124
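    // Apply an `lsp::WorkspaceEdit` to the project: resource operations (create,
    // rename, delete) go through the fs, and text edits are applied to buffers
    // opened via the language server, accumulating a `ProjectTransaction`.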
3125 async fn deserialize_workspace_edit(
3126 this: ModelHandle<Self>,
3127 edit: lsp::WorkspaceEdit,
3128 push_to_history: bool,
3129 lsp_adapter: Arc<dyn LspAdapter>,
3130 language_server: Arc<LanguageServer>,
3131 cx: &mut AsyncAppContext,
3132 ) -> Result<ProjectTransaction> {
3133 let fs = this.read_with(cx, |this, _| this.fs.clone());
3134 let mut operations = Vec::new();
3135 if let Some(document_changes) = edit.document_changes {
3136 match document_changes {
3137 lsp::DocumentChanges::Edits(edits) => {
3138 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3139 }
3140 lsp::DocumentChanges::Operations(ops) => operations = ops,
3141 }
3142 } else if let Some(changes) = edit.changes {
3143 operations.extend(changes.into_iter().map(|(uri, edits)| {
3144 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3145 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3146 uri,
3147 version: None,
3148 },
3149 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3150 })
3151 }));
3152 }
3153
3154 let mut project_transaction = ProjectTransaction::default();
3155 for operation in operations {
3156 match operation {
3157 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3158 let abs_path = op
3159 .uri
3160 .to_file_path()
3161 .map_err(|_| anyhow!("can't convert URI to path"))?;
3162
3163 if let Some(parent_path) = abs_path.parent() {
3164 fs.create_dir(parent_path).await?;
3165 }
3166 if abs_path.ends_with("/") {
3167 fs.create_dir(&abs_path).await?;
3168 } else {
3169 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3170 .await?;
3171 }
3172 }
3173 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3174 let source_abs_path = op
3175 .old_uri
3176 .to_file_path()
3177 .map_err(|_| anyhow!("can't convert URI to path"))?;
3178 let target_abs_path = op
3179 .new_uri
3180 .to_file_path()
3181 .map_err(|_| anyhow!("can't convert URI to path"))?;
3182 fs.rename(
3183 &source_abs_path,
3184 &target_abs_path,
3185 op.options.map(Into::into).unwrap_or_default(),
3186 )
3187 .await?;
3188 }
3189 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3190 let abs_path = op
3191 .uri
3192 .to_file_path()
3193 .map_err(|_| anyhow!("can't convert URI to path"))?;
3194 let options = op.options.map(Into::into).unwrap_or_default();
3195 if abs_path.ends_with("/") {
3196 fs.remove_dir(&abs_path, options).await?;
3197 } else {
3198 fs.remove_file(&abs_path, options).await?;
3199 }
3200 }
3201 lsp::DocumentChangeOperation::Edit(op) => {
3202 let buffer_to_edit = this
3203 .update(cx, |this, cx| {
3204 this.open_local_buffer_via_lsp(
3205 op.text_document.uri,
3206 lsp_adapter.clone(),
3207 language_server.clone(),
3208 cx,
3209 )
3210 })
3211 .await?;
3212
3213 let edits = this
3214 .update(cx, |this, cx| {
3215 let edits = op.edits.into_iter().map(|edit| match edit {
3216 lsp::OneOf::Left(edit) => edit,
3217 lsp::OneOf::Right(edit) => edit.text_edit,
3218 });
3219 this.edits_from_lsp(
3220 &buffer_to_edit,
3221 edits,
3222 op.text_document.version,
3223 cx,
3224 )
3225 })
3226 .await?;
3227
3228 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3229 buffer.finalize_last_transaction();
3230 buffer.start_transaction();
3231 for (range, text) in edits {
3232 buffer.edit([(range, text)], cx);
3233 }
3234 let transaction = if buffer.end_transaction(cx).is_some() {
3235 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3236 if !push_to_history {
3237 buffer.forget_transaction(transaction.id);
3238 }
3239 Some(transaction)
3240 } else {
3241 None
3242 };
3243
3244 transaction
3245 });
3246 if let Some(transaction) = transaction {
3247 project_transaction.0.insert(buffer_to_edit, transaction);
3248 }
3249 }
3250 }
3251 }
3252
3253 Ok(project_transaction)
3254 }
3255
3256 pub fn prepare_rename<T: ToPointUtf16>(
3257 &self,
3258 buffer: ModelHandle<Buffer>,
3259 position: T,
3260 cx: &mut ModelContext<Self>,
3261 ) -> Task<Result<Option<Range<Anchor>>>> {
3262 let position = position.to_point_utf16(buffer.read(cx));
3263 self.request_lsp(buffer, PrepareRename { position }, cx)
3264 }
3265
3266 pub fn perform_rename<T: ToPointUtf16>(
3267 &self,
3268 buffer: ModelHandle<Buffer>,
3269 position: T,
3270 new_name: String,
3271 push_to_history: bool,
3272 cx: &mut ModelContext<Self>,
3273 ) -> Task<Result<ProjectTransaction>> {
3274 let position = position.to_point_utf16(buffer.read(cx));
3275 self.request_lsp(
3276 buffer,
3277 PerformRename {
3278 position,
3279 new_name,
3280 push_to_history,
3281 },
3282 cx,
3283 )
3284 }
3285
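    // Project-wide search. Locally, worktree paths are scanned in parallel to find
    // candidate files, matching files are opened as buffers, and the buffers are
    // searched on background threads; remotely, the query is forwarded to the host.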
3286 pub fn search(
3287 &self,
3288 query: SearchQuery,
3289 cx: &mut ModelContext<Self>,
3290 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3291 if self.is_local() {
3292 let snapshots = self
3293 .visible_worktrees(cx)
3294 .filter_map(|tree| {
3295 let tree = tree.read(cx).as_local()?;
3296 Some(tree.snapshot())
3297 })
3298 .collect::<Vec<_>>();
3299
3300 let background = cx.background().clone();
3301 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3302 if path_count == 0 {
3303 return Task::ready(Ok(Default::default()));
3304 }
3305 let workers = background.num_cpus().min(path_count);
3306 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3307 cx.background()
3308 .spawn({
3309 let fs = self.fs.clone();
3310 let background = cx.background().clone();
3311 let query = query.clone();
3312 async move {
3313 let fs = &fs;
3314 let query = &query;
3315 let matching_paths_tx = &matching_paths_tx;
3316 let paths_per_worker = (path_count + workers - 1) / workers;
3317 let snapshots = &snapshots;
3318 background
3319 .scoped(|scope| {
3320 for worker_ix in 0..workers {
3321 let worker_start_ix = worker_ix * paths_per_worker;
3322 let worker_end_ix = worker_start_ix + paths_per_worker;
3323 scope.spawn(async move {
3324 let mut snapshot_start_ix = 0;
3325 let mut abs_path = PathBuf::new();
3326 for snapshot in snapshots {
3327 let snapshot_end_ix =
3328 snapshot_start_ix + snapshot.visible_file_count();
3329 if worker_end_ix <= snapshot_start_ix {
3330 break;
3331 } else if worker_start_ix > snapshot_end_ix {
3332 snapshot_start_ix = snapshot_end_ix;
3333 continue;
3334 } else {
3335 let start_in_snapshot = worker_start_ix
3336 .saturating_sub(snapshot_start_ix);
3337 let end_in_snapshot =
3338 cmp::min(worker_end_ix, snapshot_end_ix)
3339 - snapshot_start_ix;
3340
3341 for entry in snapshot
3342 .files(false, start_in_snapshot)
3343 .take(end_in_snapshot - start_in_snapshot)
3344 {
3345 if matching_paths_tx.is_closed() {
3346 break;
3347 }
3348
3349 abs_path.clear();
3350 abs_path.push(&snapshot.abs_path());
3351 abs_path.push(&entry.path);
3352 let matches = if let Some(file) =
3353 fs.open_sync(&abs_path).await.log_err()
3354 {
3355 query.detect(file).unwrap_or(false)
3356 } else {
3357 false
3358 };
3359
3360 if matches {
3361 let project_path =
3362 (snapshot.id(), entry.path.clone());
3363 if matching_paths_tx
3364 .send(project_path)
3365 .await
3366 .is_err()
3367 {
3368 break;
3369 }
3370 }
3371 }
3372
3373 snapshot_start_ix = snapshot_end_ix;
3374 }
3375 }
3376 });
3377 }
3378 })
3379 .await;
3380 }
3381 })
3382 .detach();
3383
3384 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3385 let open_buffers = self
3386 .opened_buffers
3387 .values()
3388 .filter_map(|b| b.upgrade(cx))
3389 .collect::<HashSet<_>>();
3390 cx.spawn(|this, cx| async move {
3391 for buffer in &open_buffers {
3392 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3393 buffers_tx.send((buffer.clone(), snapshot)).await?;
3394 }
3395
3396 let open_buffers = Rc::new(RefCell::new(open_buffers));
3397 while let Some(project_path) = matching_paths_rx.next().await {
3398 if buffers_tx.is_closed() {
3399 break;
3400 }
3401
3402 let this = this.clone();
3403 let open_buffers = open_buffers.clone();
3404 let buffers_tx = buffers_tx.clone();
3405 cx.spawn(|mut cx| async move {
3406 if let Some(buffer) = this
3407 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3408 .await
3409 .log_err()
3410 {
3411 if open_buffers.borrow_mut().insert(buffer.clone()) {
3412 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3413 buffers_tx.send((buffer, snapshot)).await?;
3414 }
3415 }
3416
3417 Ok::<_, anyhow::Error>(())
3418 })
3419 .detach();
3420 }
3421
3422 Ok::<_, anyhow::Error>(())
3423 })
3424 .detach_and_log_err(cx);
3425
3426 let background = cx.background().clone();
3427 cx.background().spawn(async move {
3428 let query = &query;
3429 let mut matched_buffers = Vec::new();
3430 for _ in 0..workers {
3431 matched_buffers.push(HashMap::default());
3432 }
3433 background
3434 .scoped(|scope| {
3435 for worker_matched_buffers in matched_buffers.iter_mut() {
3436 let mut buffers_rx = buffers_rx.clone();
3437 scope.spawn(async move {
3438 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3439 let buffer_matches = query
3440 .search(snapshot.as_rope())
3441 .await
3442 .iter()
3443 .map(|range| {
3444 snapshot.anchor_before(range.start)
3445 ..snapshot.anchor_after(range.end)
3446 })
3447 .collect::<Vec<_>>();
3448 if !buffer_matches.is_empty() {
3449 worker_matched_buffers
3450 .insert(buffer.clone(), buffer_matches);
3451 }
3452 }
3453 });
3454 }
3455 })
3456 .await;
3457 Ok(matched_buffers.into_iter().flatten().collect())
3458 })
3459 } else if let Some(project_id) = self.remote_id() {
3460 let request = self.client.request(query.to_proto(project_id));
3461 cx.spawn(|this, mut cx| async move {
3462 let response = request.await?;
3463 let mut result = HashMap::default();
3464 for location in response.locations {
3465 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3466 let target_buffer = this
3467 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3468 .await?;
3469 let start = location
3470 .start
3471 .and_then(deserialize_anchor)
3472 .ok_or_else(|| anyhow!("missing target start"))?;
3473 let end = location
3474 .end
3475 .and_then(deserialize_anchor)
3476 .ok_or_else(|| anyhow!("missing target end"))?;
3477 result
3478 .entry(target_buffer)
3479                        .or_insert_with(Vec::new)
3480 .push(start..end)
3481 }
3482 Ok(result)
3483 })
3484 } else {
3485 Task::ready(Ok(Default::default()))
3486 }
3487 }
3488
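    // Dispatch an LSP-backed request either to the buffer's local language server
    // or, for remote projects, to the host over RPC.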
3489 fn request_lsp<R: LspCommand>(
3490 &self,
3491 buffer_handle: ModelHandle<Buffer>,
3492 request: R,
3493 cx: &mut ModelContext<Self>,
3494 ) -> Task<Result<R::Response>>
3495 where
3496 <R::LspRequest as lsp::request::Request>::Result: Send,
3497 {
3498 let buffer = buffer_handle.read(cx);
3499 if self.is_local() {
3500 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3501 if let Some((file, (_, language_server))) =
3502 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3503 {
3504 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3505 return cx.spawn(|this, cx| async move {
3506 if !request.check_capabilities(&language_server.capabilities()) {
3507 return Ok(Default::default());
3508 }
3509
3510 let response = language_server
3511 .request::<R::LspRequest>(lsp_params)
3512 .await
3513 .context("lsp request failed")?;
3514 request
3515 .response_from_lsp(response, this, buffer_handle, cx)
3516 .await
3517 });
3518 }
3519 } else if let Some(project_id) = self.remote_id() {
3520 let rpc = self.client.clone();
3521 let message = request.to_proto(project_id, buffer);
3522 return cx.spawn(|this, cx| async move {
3523 let response = rpc.request(message).await?;
3524 request
3525 .response_from_proto(response, this, buffer_handle, cx)
3526 .await
3527 });
3528 }
3529 Task::ready(Ok(Default::default()))
3530 }
3531
3532 pub fn find_or_create_local_worktree(
3533 &mut self,
3534 abs_path: impl AsRef<Path>,
3535 visible: bool,
3536 cx: &mut ModelContext<Self>,
3537 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3538 let abs_path = abs_path.as_ref();
3539 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3540 Task::ready(Ok((tree.clone(), relative_path.into())))
3541 } else {
3542 let worktree = self.create_local_worktree(abs_path, visible, cx);
3543 cx.foreground()
3544 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3545 }
3546 }
3547
3548 pub fn find_local_worktree(
3549 &self,
3550 abs_path: &Path,
3551 cx: &AppContext,
3552 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3553 for tree in self.worktrees(cx) {
3554 if let Some(relative_path) = tree
3555 .read(cx)
3556 .as_local()
3557 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3558 {
3559 return Some((tree.clone(), relative_path.into()));
3560 }
3561 }
3562 None
3563 }
3564
3565 pub fn is_shared(&self) -> bool {
3566 match &self.client_state {
3567 ProjectClientState::Local { is_shared, .. } => *is_shared,
3568 ProjectClientState::Remote { .. } => false,
3569 }
3570 }
3571
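    // Create (or join an in-flight load of) a local worktree for the given path.
    // If the project has a remote id, the new worktree is shared or registered with
    // the server once it has loaded.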
3572 fn create_local_worktree(
3573 &mut self,
3574 abs_path: impl AsRef<Path>,
3575 visible: bool,
3576 cx: &mut ModelContext<Self>,
3577 ) -> Task<Result<ModelHandle<Worktree>>> {
3578 let fs = self.fs.clone();
3579 let client = self.client.clone();
3580 let next_entry_id = self.next_entry_id.clone();
3581 let path: Arc<Path> = abs_path.as_ref().into();
3582 let task = self
3583 .loading_local_worktrees
3584 .entry(path.clone())
3585 .or_insert_with(|| {
3586 cx.spawn(|project, mut cx| {
3587 async move {
3588 let worktree = Worktree::local(
3589 client.clone(),
3590 path.clone(),
3591 visible,
3592 fs,
3593 next_entry_id,
3594 &mut cx,
3595 )
3596 .await;
3597 project.update(&mut cx, |project, _| {
3598 project.loading_local_worktrees.remove(&path);
3599 });
3600 let worktree = worktree?;
3601
3602 let remote_project_id = project.update(&mut cx, |project, cx| {
3603 project.add_worktree(&worktree, cx);
3604 project.remote_id()
3605 });
3606
3607 if let Some(project_id) = remote_project_id {
3608 // Because sharing is async, we may have *unshared* the project by the time it completes,
3609 // in which case we need to register the worktree instead.
3610 loop {
3611 if project.read_with(&cx, |project, _| project.is_shared()) {
3612 if worktree
3613 .update(&mut cx, |worktree, cx| {
3614 worktree.as_local_mut().unwrap().share(project_id, cx)
3615 })
3616 .await
3617 .is_ok()
3618 {
3619 break;
3620 }
3621 } else {
3622 worktree
3623 .update(&mut cx, |worktree, cx| {
3624 worktree
3625 .as_local_mut()
3626 .unwrap()
3627 .register(project_id, cx)
3628 })
3629 .await?;
3630 break;
3631 }
3632 }
3633 }
3634
3635 Ok(worktree)
3636 }
3637                    .map_err(Arc::new)
3638 })
3639 .shared()
3640 })
3641 .clone();
3642 cx.foreground().spawn(async move {
3643 match task.await {
3644 Ok(worktree) => Ok(worktree),
3645 Err(err) => Err(anyhow!("{}", err)),
3646 }
3647 })
3648 }
3649
3650 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3651 self.worktrees.retain(|worktree| {
3652 if let Some(worktree) = worktree.upgrade(cx) {
3653 let id = worktree.read(cx).id();
3654 if id == id_to_remove {
3655 cx.emit(Event::WorktreeRemoved(id));
3656 false
3657 } else {
3658 true
3659 }
3660 } else {
3661 false
3662 }
3663 });
3664 cx.notify();
3665 }
3666
3667 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3668 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3669 if worktree.read(cx).is_local() {
3670 cx.subscribe(&worktree, |this, worktree, _, cx| {
3671 this.update_local_worktree_buffers(worktree, cx);
3672 })
3673 .detach();
3674 }
3675
3676 let push_strong_handle = {
3677 let worktree = worktree.read(cx);
3678 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3679 };
3680 if push_strong_handle {
3681 self.worktrees
3682 .push(WorktreeHandle::Strong(worktree.clone()));
3683 } else {
3684 cx.observe_release(&worktree, |this, _, cx| {
3685 this.worktrees
3686 .retain(|worktree| worktree.upgrade(cx).is_some());
3687 cx.notify();
3688 })
3689 .detach();
3690 self.worktrees
3691 .push(WorktreeHandle::Weak(worktree.downgrade()));
3692 }
3693 cx.emit(Event::WorktreeAdded);
3694 cx.notify();
3695 }
3696
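    // After a worktree scan, refresh the `File` metadata of open buffers in that
    // worktree, notify collaborators of the changes, and re-register any renamed
    // buffers with their language servers.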
3697 fn update_local_worktree_buffers(
3698 &mut self,
3699 worktree_handle: ModelHandle<Worktree>,
3700 cx: &mut ModelContext<Self>,
3701 ) {
3702 let snapshot = worktree_handle.read(cx).snapshot();
3703 let mut buffers_to_delete = Vec::new();
3704 let mut renamed_buffers = Vec::new();
3705 for (buffer_id, buffer) in &self.opened_buffers {
3706 if let Some(buffer) = buffer.upgrade(cx) {
3707 buffer.update(cx, |buffer, cx| {
3708 if let Some(old_file) = File::from_dyn(buffer.file()) {
3709 if old_file.worktree != worktree_handle {
3710 return;
3711 }
3712
3713 let new_file = if let Some(entry) = old_file
3714 .entry_id
3715 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3716 {
3717 File {
3718 is_local: true,
3719 entry_id: Some(entry.id),
3720 mtime: entry.mtime,
3721 path: entry.path.clone(),
3722 worktree: worktree_handle.clone(),
3723 }
3724 } else if let Some(entry) =
3725 snapshot.entry_for_path(old_file.path().as_ref())
3726 {
3727 File {
3728 is_local: true,
3729 entry_id: Some(entry.id),
3730 mtime: entry.mtime,
3731 path: entry.path.clone(),
3732 worktree: worktree_handle.clone(),
3733 }
3734 } else {
3735 File {
3736 is_local: true,
3737 entry_id: None,
3738 path: old_file.path().clone(),
3739 mtime: old_file.mtime(),
3740 worktree: worktree_handle.clone(),
3741 }
3742 };
3743
3744 let old_path = old_file.abs_path(cx);
3745 if new_file.abs_path(cx) != old_path {
3746 renamed_buffers.push((cx.handle(), old_path));
3747 }
3748
3749 if let Some(project_id) = self.remote_id() {
3750 self.client
3751 .send(proto::UpdateBufferFile {
3752 project_id,
3753 buffer_id: *buffer_id as u64,
3754 file: Some(new_file.to_proto()),
3755 })
3756 .log_err();
3757 }
3758 buffer.file_updated(Box::new(new_file), cx).detach();
3759 }
3760 });
3761 } else {
3762 buffers_to_delete.push(*buffer_id);
3763 }
3764 }
3765
3766 for buffer_id in buffers_to_delete {
3767 self.opened_buffers.remove(&buffer_id);
3768 }
3769
3770 for (buffer, old_path) in renamed_buffers {
3771 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3772 self.assign_language_to_buffer(&buffer, cx);
3773 self.register_buffer_with_language_server(&buffer, cx);
3774 }
3775 }
3776
3777 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3778 let new_active_entry = entry.and_then(|project_path| {
3779 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3780 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3781 Some(entry.id)
3782 });
3783 if new_active_entry != self.active_entry {
3784 self.active_entry = new_active_entry;
3785 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3786 }
3787 }
3788
3789 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3790 self.language_server_statuses
3791 .values()
3792 .any(|status| status.pending_diagnostic_updates > 0)
3793 }
3794
3795 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3796 let mut summary = DiagnosticSummary::default();
3797 for (_, path_summary) in self.diagnostic_summaries(cx) {
3798 summary.error_count += path_summary.error_count;
3799 summary.warning_count += path_summary.warning_count;
3800 }
3801 summary
3802 }
3803
3804 pub fn diagnostic_summaries<'a>(
3805 &'a self,
3806 cx: &'a AppContext,
3807 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3808 self.worktrees(cx).flat_map(move |worktree| {
3809 let worktree = worktree.read(cx);
3810 let worktree_id = worktree.id();
3811 worktree
3812 .diagnostic_summaries()
3813 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3814 })
3815 }
3816
3817 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3818 if self
3819 .language_server_statuses
3820 .values()
3821 .map(|status| status.pending_diagnostic_updates)
3822 .sum::<isize>()
3823 == 1
3824 {
3825 cx.emit(Event::DiskBasedDiagnosticsStarted);
3826 }
3827 }
3828
3829 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3830 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3831 if self
3832 .language_server_statuses
3833 .values()
3834 .map(|status| status.pending_diagnostic_updates)
3835 .sum::<isize>()
3836 == 0
3837 {
3838 cx.emit(Event::DiskBasedDiagnosticsFinished);
3839 }
3840 }
3841
3842 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3843 self.active_entry
3844 }
3845
3846 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3847 self.worktree_for_id(path.worktree_id, cx)?
3848 .read(cx)
3849 .entry_for_path(&path.path)
3850 .map(|entry| entry.id)
3851 }
3852
3853 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3854 let worktree = self.worktree_for_entry(entry_id, cx)?;
3855 let worktree = worktree.read(cx);
3856 let worktree_id = worktree.id();
3857 let path = worktree.entry_for_id(entry_id)?.path.clone();
3858 Some(ProjectPath { worktree_id, path })
3859 }
3860
3861 // RPC message handlers
3862
3863 async fn handle_request_join_project(
3864 this: ModelHandle<Self>,
3865 message: TypedEnvelope<proto::RequestJoinProject>,
3866 _: Arc<Client>,
3867 mut cx: AsyncAppContext,
3868 ) -> Result<()> {
3869 let user_id = message.payload.requester_id;
3870 if this.read_with(&cx, |project, _| {
3871 project.collaborators.values().any(|c| c.user.id == user_id)
3872 }) {
3873 this.update(&mut cx, |this, cx| {
3874 this.respond_to_join_request(user_id, true, cx)
3875 });
3876 } else {
3877 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3878 let user = user_store
3879 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3880 .await?;
3881 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3882 }
3883 Ok(())
3884 }
3885
3886 async fn handle_unregister_project(
3887 this: ModelHandle<Self>,
3888 _: TypedEnvelope<proto::UnregisterProject>,
3889 _: Arc<Client>,
3890 mut cx: AsyncAppContext,
3891 ) -> Result<()> {
3892 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3893 Ok(())
3894 }
3895
3896 async fn handle_project_unshared(
3897 this: ModelHandle<Self>,
3898 _: TypedEnvelope<proto::ProjectUnshared>,
3899 _: Arc<Client>,
3900 mut cx: AsyncAppContext,
3901 ) -> Result<()> {
3902 this.update(&mut cx, |this, cx| this.unshared(cx));
3903 Ok(())
3904 }
3905
3906 async fn handle_add_collaborator(
3907 this: ModelHandle<Self>,
3908 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3909 _: Arc<Client>,
3910 mut cx: AsyncAppContext,
3911 ) -> Result<()> {
3912 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3913 let collaborator = envelope
3914 .payload
3915 .collaborator
3916 .take()
3917 .ok_or_else(|| anyhow!("empty collaborator"))?;
3918
3919 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3920 this.update(&mut cx, |this, cx| {
3921 this.collaborators
3922 .insert(collaborator.peer_id, collaborator);
3923 cx.notify();
3924 });
3925
3926 Ok(())
3927 }
3928
3929 async fn handle_remove_collaborator(
3930 this: ModelHandle<Self>,
3931 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3932 _: Arc<Client>,
3933 mut cx: AsyncAppContext,
3934 ) -> Result<()> {
3935 this.update(&mut cx, |this, cx| {
3936 let peer_id = PeerId(envelope.payload.peer_id);
3937 let replica_id = this
3938 .collaborators
3939 .remove(&peer_id)
3940 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3941 .replica_id;
3942 for (_, buffer) in &this.opened_buffers {
3943 if let Some(buffer) = buffer.upgrade(cx) {
3944 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3945 }
3946 }
3947
3948 cx.emit(Event::CollaboratorLeft(peer_id));
3949 cx.notify();
3950 Ok(())
3951 })
3952 }
3953
3954 async fn handle_join_project_request_cancelled(
3955 this: ModelHandle<Self>,
3956 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3957 _: Arc<Client>,
3958 mut cx: AsyncAppContext,
3959 ) -> Result<()> {
3960 let user = this
3961 .update(&mut cx, |this, cx| {
3962 this.user_store.update(cx, |user_store, cx| {
3963 user_store.fetch_user(envelope.payload.requester_id, cx)
3964 })
3965 })
3966 .await?;
3967
3968 this.update(&mut cx, |_, cx| {
3969 cx.emit(Event::ContactCancelledJoinRequest(user));
3970 });
3971
3972 Ok(())
3973 }
3974
3975 async fn handle_register_worktree(
3976 this: ModelHandle<Self>,
3977 envelope: TypedEnvelope<proto::RegisterWorktree>,
3978 client: Arc<Client>,
3979 mut cx: AsyncAppContext,
3980 ) -> Result<()> {
3981 this.update(&mut cx, |this, cx| {
3982 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3983 let replica_id = this.replica_id();
3984 let worktree = proto::Worktree {
3985 id: envelope.payload.worktree_id,
3986 root_name: envelope.payload.root_name,
3987 entries: Default::default(),
3988 diagnostic_summaries: Default::default(),
3989 visible: envelope.payload.visible,
3990 scan_id: 0,
3991 };
3992 let (worktree, load_task) =
3993 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3994 this.add_worktree(&worktree, cx);
3995 load_task.detach();
3996 Ok(())
3997 })
3998 }
3999
4000 async fn handle_unregister_worktree(
4001 this: ModelHandle<Self>,
4002 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4003 _: Arc<Client>,
4004 mut cx: AsyncAppContext,
4005 ) -> Result<()> {
4006 this.update(&mut cx, |this, cx| {
4007 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4008 this.remove_worktree(worktree_id, cx);
4009 Ok(())
4010 })
4011 }
4012
4013 async fn handle_update_worktree(
4014 this: ModelHandle<Self>,
4015 envelope: TypedEnvelope<proto::UpdateWorktree>,
4016 _: Arc<Client>,
4017 mut cx: AsyncAppContext,
4018 ) -> Result<()> {
4019 this.update(&mut cx, |this, cx| {
4020 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4021 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4022 worktree.update(cx, |worktree, _| {
4023 let worktree = worktree.as_remote_mut().unwrap();
4024 worktree.update_from_remote(envelope)
4025 })?;
4026 }
4027 Ok(())
4028 })
4029 }
4030
4031 async fn handle_create_project_entry(
4032 this: ModelHandle<Self>,
4033 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4034 _: Arc<Client>,
4035 mut cx: AsyncAppContext,
4036 ) -> Result<proto::ProjectEntryResponse> {
4037 let worktree = this.update(&mut cx, |this, cx| {
4038 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4039 this.worktree_for_id(worktree_id, cx)
4040 .ok_or_else(|| anyhow!("worktree not found"))
4041 })?;
4042 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4043 let entry = worktree
4044 .update(&mut cx, |worktree, cx| {
4045 let worktree = worktree.as_local_mut().unwrap();
4046 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4047 worktree.create_entry(path, envelope.payload.is_directory, cx)
4048 })
4049 .await?;
4050 Ok(proto::ProjectEntryResponse {
4051 entry: Some((&entry).into()),
4052 worktree_scan_id: worktree_scan_id as u64,
4053 })
4054 }
4055
4056 async fn handle_rename_project_entry(
4057 this: ModelHandle<Self>,
4058 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4059 _: Arc<Client>,
4060 mut cx: AsyncAppContext,
4061 ) -> Result<proto::ProjectEntryResponse> {
4062 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4063 let worktree = this.read_with(&cx, |this, cx| {
4064 this.worktree_for_entry(entry_id, cx)
4065 .ok_or_else(|| anyhow!("worktree not found"))
4066 })?;
4067 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4068 let entry = worktree
4069 .update(&mut cx, |worktree, cx| {
4070 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4071 worktree
4072 .as_local_mut()
4073 .unwrap()
4074 .rename_entry(entry_id, new_path, cx)
4075 .ok_or_else(|| anyhow!("invalid entry"))
4076 })?
4077 .await?;
4078 Ok(proto::ProjectEntryResponse {
4079 entry: Some((&entry).into()),
4080 worktree_scan_id: worktree_scan_id as u64,
4081 })
4082 }
4083
4084 async fn handle_copy_project_entry(
4085 this: ModelHandle<Self>,
4086 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4087 _: Arc<Client>,
4088 mut cx: AsyncAppContext,
4089 ) -> Result<proto::ProjectEntryResponse> {
4090 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4091 let worktree = this.read_with(&cx, |this, cx| {
4092 this.worktree_for_entry(entry_id, cx)
4093 .ok_or_else(|| anyhow!("worktree not found"))
4094 })?;
4095 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4096 let entry = worktree
4097 .update(&mut cx, |worktree, cx| {
4098 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4099 worktree
4100 .as_local_mut()
4101 .unwrap()
4102 .copy_entry(entry_id, new_path, cx)
4103 .ok_or_else(|| anyhow!("invalid entry"))
4104 })?
4105 .await?;
4106 Ok(proto::ProjectEntryResponse {
4107 entry: Some((&entry).into()),
4108 worktree_scan_id: worktree_scan_id as u64,
4109 })
4110 }
4111
4112 async fn handle_delete_project_entry(
4113 this: ModelHandle<Self>,
4114 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4115 _: Arc<Client>,
4116 mut cx: AsyncAppContext,
4117 ) -> Result<proto::ProjectEntryResponse> {
4118 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4119 let worktree = this.read_with(&cx, |this, cx| {
4120 this.worktree_for_entry(entry_id, cx)
4121 .ok_or_else(|| anyhow!("worktree not found"))
4122 })?;
4123 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4124 worktree
4125 .update(&mut cx, |worktree, cx| {
4126 worktree
4127 .as_local_mut()
4128 .unwrap()
4129 .delete_entry(entry_id, cx)
4130 .ok_or_else(|| anyhow!("invalid entry"))
4131 })?
4132 .await?;
4133 Ok(proto::ProjectEntryResponse {
4134 entry: None,
4135 worktree_scan_id: worktree_scan_id as u64,
4136 })
4137 }
4138
4139 async fn handle_update_diagnostic_summary(
4140 this: ModelHandle<Self>,
4141 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4142 _: Arc<Client>,
4143 mut cx: AsyncAppContext,
4144 ) -> Result<()> {
4145 this.update(&mut cx, |this, cx| {
4146 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4147 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4148 if let Some(summary) = envelope.payload.summary {
4149 let project_path = ProjectPath {
4150 worktree_id,
4151 path: Path::new(&summary.path).into(),
4152 };
4153 worktree.update(cx, |worktree, _| {
4154 worktree
4155 .as_remote_mut()
4156 .unwrap()
4157 .update_diagnostic_summary(project_path.path.clone(), &summary);
4158 });
4159 cx.emit(Event::DiagnosticsUpdated(project_path));
4160 }
4161 }
4162 Ok(())
4163 })
4164 }
4165
4166 async fn handle_start_language_server(
4167 this: ModelHandle<Self>,
4168 envelope: TypedEnvelope<proto::StartLanguageServer>,
4169 _: Arc<Client>,
4170 mut cx: AsyncAppContext,
4171 ) -> Result<()> {
4172 let server = envelope
4173 .payload
4174 .server
4175 .ok_or_else(|| anyhow!("invalid server"))?;
4176 this.update(&mut cx, |this, cx| {
4177 this.language_server_statuses.insert(
4178 server.id as usize,
4179 LanguageServerStatus {
4180 name: server.name,
4181 pending_work: Default::default(),
4182 pending_diagnostic_updates: 0,
4183 },
4184 );
4185 cx.notify();
4186 });
4187 Ok(())
4188 }
4189
4190 async fn handle_update_language_server(
4191 this: ModelHandle<Self>,
4192 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4193 _: Arc<Client>,
4194 mut cx: AsyncAppContext,
4195 ) -> Result<()> {
4196 let language_server_id = envelope.payload.language_server_id as usize;
4197 match envelope
4198 .payload
4199 .variant
4200 .ok_or_else(|| anyhow!("invalid variant"))?
4201 {
4202 proto::update_language_server::Variant::WorkStart(payload) => {
4203 this.update(&mut cx, |this, cx| {
4204 this.on_lsp_work_start(language_server_id, payload.token, cx);
4205 })
4206 }
4207 proto::update_language_server::Variant::WorkProgress(payload) => {
4208 this.update(&mut cx, |this, cx| {
4209 this.on_lsp_work_progress(
4210 language_server_id,
4211 payload.token,
4212 LanguageServerProgress {
4213 message: payload.message,
4214 percentage: payload.percentage.map(|p| p as usize),
4215 last_update_at: Instant::now(),
4216 },
4217 cx,
4218 );
4219 })
4220 }
4221 proto::update_language_server::Variant::WorkEnd(payload) => {
4222 this.update(&mut cx, |this, cx| {
4223 this.on_lsp_work_end(language_server_id, payload.token, cx);
4224 })
4225 }
4226 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4227 this.update(&mut cx, |this, cx| {
4228 this.disk_based_diagnostics_started(cx);
4229 })
4230 }
4231 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4232 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4233 }
4234 }
4235
4236 Ok(())
4237 }
4238
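    // Applies buffer operations received from a peer. Operations arriving while the buffer is
    // still loading are queued and applied once it opens; operations for a buffer that isn't
    // known at all are held in a `Loading` entry, which is only expected on guests.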
4239 async fn handle_update_buffer(
4240 this: ModelHandle<Self>,
4241 envelope: TypedEnvelope<proto::UpdateBuffer>,
4242 _: Arc<Client>,
4243 mut cx: AsyncAppContext,
4244 ) -> Result<()> {
4245 this.update(&mut cx, |this, cx| {
4246 let payload = envelope.payload.clone();
4247 let buffer_id = payload.buffer_id;
4248 let ops = payload
4249 .operations
4250 .into_iter()
4251 .map(|op| language::proto::deserialize_operation(op))
4252 .collect::<Result<Vec<_>, _>>()?;
4253 let is_remote = this.is_remote();
4254 match this.opened_buffers.entry(buffer_id) {
4255 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4256 OpenBuffer::Strong(buffer) => {
4257 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4258 }
4259 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4260 OpenBuffer::Weak(_) => {}
4261 },
4262 hash_map::Entry::Vacant(e) => {
4263 assert!(
4264 is_remote,
4265 "received buffer update from {:?}",
4266 envelope.original_sender_id
4267 );
4268 e.insert(OpenBuffer::Loading(ops));
4269 }
4270 }
4271 Ok(())
4272 })
4273 }
4274
4275 async fn handle_update_buffer_file(
4276 this: ModelHandle<Self>,
4277 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4278 _: Arc<Client>,
4279 mut cx: AsyncAppContext,
4280 ) -> Result<()> {
4281 this.update(&mut cx, |this, cx| {
4282 let payload = envelope.payload.clone();
4283 let buffer_id = payload.buffer_id;
4284 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4285 let worktree = this
4286 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4287 .ok_or_else(|| anyhow!("no such worktree"))?;
4288 let file = File::from_proto(file, worktree.clone(), cx)?;
4289 let buffer = this
4290 .opened_buffers
4291 .get_mut(&buffer_id)
4292 .and_then(|b| b.upgrade(cx))
4293 .ok_or_else(|| anyhow!("no such buffer"))?;
4294 buffer.update(cx, |buffer, cx| {
4295 buffer.file_updated(Box::new(file), cx).detach();
4296 });
4297 Ok(())
4298 })
4299 }
4300
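    // Saves a buffer on behalf of a peer, first waiting for the buffer to catch up to the
    // version the peer requested, then reporting the saved version and mtime back.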
4301 async fn handle_save_buffer(
4302 this: ModelHandle<Self>,
4303 envelope: TypedEnvelope<proto::SaveBuffer>,
4304 _: Arc<Client>,
4305 mut cx: AsyncAppContext,
4306 ) -> Result<proto::BufferSaved> {
4307 let buffer_id = envelope.payload.buffer_id;
4308 let requested_version = deserialize_version(envelope.payload.version);
4309
4310 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4311 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4312 let buffer = this
4313 .opened_buffers
4314 .get(&buffer_id)
4315 .and_then(|buffer| buffer.upgrade(cx))
4316 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4317 Ok::<_, anyhow::Error>((project_id, buffer))
4318 })?;
4319 buffer
4320 .update(&mut cx, |buffer, _| {
4321 buffer.wait_for_version(requested_version)
4322 })
4323 .await;
4324
4325 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4326 Ok(proto::BufferSaved {
4327 project_id,
4328 buffer_id,
4329 version: serialize_version(&saved_version),
4330 mtime: Some(mtime.into()),
4331 })
4332 }
4333
4334 async fn handle_reload_buffers(
4335 this: ModelHandle<Self>,
4336 envelope: TypedEnvelope<proto::ReloadBuffers>,
4337 _: Arc<Client>,
4338 mut cx: AsyncAppContext,
4339 ) -> Result<proto::ReloadBuffersResponse> {
4340 let sender_id = envelope.original_sender_id()?;
4341 let reload = this.update(&mut cx, |this, cx| {
4342 let mut buffers = HashSet::default();
4343 for buffer_id in &envelope.payload.buffer_ids {
4344 buffers.insert(
4345 this.opened_buffers
4346 .get(buffer_id)
4347 .and_then(|buffer| buffer.upgrade(cx))
4348 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4349 );
4350 }
4351 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4352 })?;
4353
4354 let project_transaction = reload.await?;
4355 let project_transaction = this.update(&mut cx, |this, cx| {
4356 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4357 });
4358 Ok(proto::ReloadBuffersResponse {
4359 transaction: Some(project_transaction),
4360 })
4361 }
4362
4363 async fn handle_format_buffers(
4364 this: ModelHandle<Self>,
4365 envelope: TypedEnvelope<proto::FormatBuffers>,
4366 _: Arc<Client>,
4367 mut cx: AsyncAppContext,
4368 ) -> Result<proto::FormatBuffersResponse> {
4369 let sender_id = envelope.original_sender_id()?;
4370 let format = this.update(&mut cx, |this, cx| {
4371 let mut buffers = HashSet::default();
4372 for buffer_id in &envelope.payload.buffer_ids {
4373 buffers.insert(
4374 this.opened_buffers
4375 .get(buffer_id)
4376 .and_then(|buffer| buffer.upgrade(cx))
4377 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4378 );
4379 }
4380 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4381 })?;
4382
4383 let project_transaction = format.await?;
4384 let project_transaction = this.update(&mut cx, |this, cx| {
4385 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4386 });
4387 Ok(proto::FormatBuffersResponse {
4388 transaction: Some(project_transaction),
4389 })
4390 }
4391
4392 async fn handle_get_completions(
4393 this: ModelHandle<Self>,
4394 envelope: TypedEnvelope<proto::GetCompletions>,
4395 _: Arc<Client>,
4396 mut cx: AsyncAppContext,
4397 ) -> Result<proto::GetCompletionsResponse> {
4398 let position = envelope
4399 .payload
4400 .position
4401 .and_then(language::proto::deserialize_anchor)
4402 .ok_or_else(|| anyhow!("invalid position"))?;
4403 let version = deserialize_version(envelope.payload.version);
4404 let buffer = this.read_with(&cx, |this, cx| {
4405 this.opened_buffers
4406 .get(&envelope.payload.buffer_id)
4407 .and_then(|buffer| buffer.upgrade(cx))
4408 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4409 })?;
4410 buffer
4411 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4412 .await;
4413 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4414 let completions = this
4415 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4416 .await?;
4417
4418 Ok(proto::GetCompletionsResponse {
4419 completions: completions
4420 .iter()
4421 .map(language::proto::serialize_completion)
4422 .collect(),
4423 version: serialize_version(&version),
4424 })
4425 }
4426
4427 async fn handle_apply_additional_edits_for_completion(
4428 this: ModelHandle<Self>,
4429 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4430 _: Arc<Client>,
4431 mut cx: AsyncAppContext,
4432 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4433 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4434 let buffer = this
4435 .opened_buffers
4436 .get(&envelope.payload.buffer_id)
4437 .and_then(|buffer| buffer.upgrade(cx))
4438 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4439 let language = buffer.read(cx).language();
4440 let completion = language::proto::deserialize_completion(
4441 envelope
4442 .payload
4443 .completion
4444 .ok_or_else(|| anyhow!("invalid completion"))?,
4445 language,
4446 )?;
4447 Ok::<_, anyhow::Error>(
4448 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4449 )
4450 })?;
4451
4452 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4453 transaction: apply_additional_edits
4454 .await?
4455 .as_ref()
4456 .map(language::proto::serialize_transaction),
4457 })
4458 }
4459
4460 async fn handle_get_code_actions(
4461 this: ModelHandle<Self>,
4462 envelope: TypedEnvelope<proto::GetCodeActions>,
4463 _: Arc<Client>,
4464 mut cx: AsyncAppContext,
4465 ) -> Result<proto::GetCodeActionsResponse> {
4466 let start = envelope
4467 .payload
4468 .start
4469 .and_then(language::proto::deserialize_anchor)
4470 .ok_or_else(|| anyhow!("invalid start"))?;
4471 let end = envelope
4472 .payload
4473 .end
4474 .and_then(language::proto::deserialize_anchor)
4475 .ok_or_else(|| anyhow!("invalid end"))?;
4476 let buffer = this.update(&mut cx, |this, cx| {
4477 this.opened_buffers
4478 .get(&envelope.payload.buffer_id)
4479 .and_then(|buffer| buffer.upgrade(cx))
4480 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4481 })?;
4482 buffer
4483 .update(&mut cx, |buffer, _| {
4484 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4485 })
4486 .await;
4487
4488 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4489 let code_actions = this.update(&mut cx, |this, cx| {
4490 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4491 })?;
4492
4493 Ok(proto::GetCodeActionsResponse {
4494 actions: code_actions
4495 .await?
4496 .iter()
4497 .map(language::proto::serialize_code_action)
4498 .collect(),
4499 version: serialize_version(&version),
4500 })
4501 }
4502
4503 async fn handle_apply_code_action(
4504 this: ModelHandle<Self>,
4505 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4506 _: Arc<Client>,
4507 mut cx: AsyncAppContext,
4508 ) -> Result<proto::ApplyCodeActionResponse> {
4509 let sender_id = envelope.original_sender_id()?;
4510 let action = language::proto::deserialize_code_action(
4511 envelope
4512 .payload
4513 .action
4514 .ok_or_else(|| anyhow!("invalid action"))?,
4515 )?;
4516 let apply_code_action = this.update(&mut cx, |this, cx| {
4517 let buffer = this
4518 .opened_buffers
4519 .get(&envelope.payload.buffer_id)
4520 .and_then(|buffer| buffer.upgrade(cx))
4521 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4522 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4523 })?;
4524
4525 let project_transaction = apply_code_action.await?;
4526 let project_transaction = this.update(&mut cx, |this, cx| {
4527 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4528 });
4529 Ok(proto::ApplyCodeActionResponse {
4530 transaction: Some(project_transaction),
4531 })
4532 }
4533
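    // Generic handler for LSP-backed requests forwarded by peers: deserializes the request,
    // forwards it to `request_lsp`, and serializes the response for the requesting peer.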
4534 async fn handle_lsp_command<T: LspCommand>(
4535 this: ModelHandle<Self>,
4536 envelope: TypedEnvelope<T::ProtoRequest>,
4537 _: Arc<Client>,
4538 mut cx: AsyncAppContext,
4539 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4540 where
4541 <T::LspRequest as lsp::request::Request>::Result: Send,
4542 {
4543 let sender_id = envelope.original_sender_id()?;
4544 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4545 let buffer_handle = this.read_with(&cx, |this, _| {
4546 this.opened_buffers
4547 .get(&buffer_id)
4548 .and_then(|buffer| buffer.upgrade(&cx))
4549 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4550 })?;
4551 let request = T::from_proto(
4552 envelope.payload,
4553 this.clone(),
4554 buffer_handle.clone(),
4555 cx.clone(),
4556 )
4557 .await?;
4558 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4559 let response = this
4560 .update(&mut cx, |this, cx| {
4561 this.request_lsp(buffer_handle, request, cx)
4562 })
4563 .await?;
4564 this.update(&mut cx, |this, cx| {
4565 Ok(T::response_to_proto(
4566 response,
4567 this,
4568 sender_id,
4569 &buffer_version,
4570 cx,
4571 ))
4572 })
4573 }
4574
4575 async fn handle_get_project_symbols(
4576 this: ModelHandle<Self>,
4577 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4578 _: Arc<Client>,
4579 mut cx: AsyncAppContext,
4580 ) -> Result<proto::GetProjectSymbolsResponse> {
4581 let symbols = this
4582 .update(&mut cx, |this, cx| {
4583 this.symbols(&envelope.payload.query, cx)
4584 })
4585 .await?;
4586
4587 Ok(proto::GetProjectSymbolsResponse {
4588 symbols: symbols.iter().map(serialize_symbol).collect(),
4589 })
4590 }
4591
4592 async fn handle_search_project(
4593 this: ModelHandle<Self>,
4594 envelope: TypedEnvelope<proto::SearchProject>,
4595 _: Arc<Client>,
4596 mut cx: AsyncAppContext,
4597 ) -> Result<proto::SearchProjectResponse> {
4598 let peer_id = envelope.original_sender_id()?;
4599 let query = SearchQuery::from_proto(envelope.payload)?;
4600 let result = this
4601 .update(&mut cx, |this, cx| this.search(query, cx))
4602 .await?;
4603
4604 this.update(&mut cx, |this, cx| {
4605 let mut locations = Vec::new();
4606 for (buffer, ranges) in result {
4607 for range in ranges {
4608 let start = serialize_anchor(&range.start);
4609 let end = serialize_anchor(&range.end);
4610 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4611 locations.push(proto::Location {
4612 buffer: Some(buffer),
4613 start: Some(start),
4614 end: Some(end),
4615 });
4616 }
4617 }
4618 Ok(proto::SearchProjectResponse { locations })
4619 })
4620 }
4621
4622 async fn handle_open_buffer_for_symbol(
4623 this: ModelHandle<Self>,
4624 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4625 _: Arc<Client>,
4626 mut cx: AsyncAppContext,
4627 ) -> Result<proto::OpenBufferForSymbolResponse> {
4628 let peer_id = envelope.original_sender_id()?;
4629 let symbol = envelope
4630 .payload
4631 .symbol
4632 .ok_or_else(|| anyhow!("invalid symbol"))?;
4633 let symbol = this.read_with(&cx, |this, _| {
4634 let symbol = this.deserialize_symbol(symbol)?;
4635 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4636 if signature == symbol.signature {
4637 Ok(symbol)
4638 } else {
4639 Err(anyhow!("invalid symbol signature"))
4640 }
4641 })?;
4642 let buffer = this
4643 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4644 .await?;
4645
4646 Ok(proto::OpenBufferForSymbolResponse {
4647 buffer: Some(this.update(&mut cx, |this, cx| {
4648 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4649 })),
4650 })
4651 }
4652
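    // Hashes a symbol's worktree id and path together with this project's nonce, producing a
    // signature that `handle_open_buffer_for_symbol` uses to check that a symbol it receives
    // was originally produced by this project.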
4653 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4654 let mut hasher = Sha256::new();
4655 hasher.update(worktree_id.to_proto().to_be_bytes());
4656 hasher.update(path.to_string_lossy().as_bytes());
4657 hasher.update(self.nonce.to_be_bytes());
4658 hasher.finalize().as_slice().try_into().unwrap()
4659 }
4660
4661 async fn handle_open_buffer_by_id(
4662 this: ModelHandle<Self>,
4663 envelope: TypedEnvelope<proto::OpenBufferById>,
4664 _: Arc<Client>,
4665 mut cx: AsyncAppContext,
4666 ) -> Result<proto::OpenBufferResponse> {
4667 let peer_id = envelope.original_sender_id()?;
4668 let buffer = this
4669 .update(&mut cx, |this, cx| {
4670 this.open_buffer_by_id(envelope.payload.id, cx)
4671 })
4672 .await?;
4673 this.update(&mut cx, |this, cx| {
4674 Ok(proto::OpenBufferResponse {
4675 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4676 })
4677 })
4678 }
4679
4680 async fn handle_open_buffer_by_path(
4681 this: ModelHandle<Self>,
4682 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4683 _: Arc<Client>,
4684 mut cx: AsyncAppContext,
4685 ) -> Result<proto::OpenBufferResponse> {
4686 let peer_id = envelope.original_sender_id()?;
4687 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4688 let open_buffer = this.update(&mut cx, |this, cx| {
4689 this.open_buffer(
4690 ProjectPath {
4691 worktree_id,
4692 path: PathBuf::from(envelope.payload.path).into(),
4693 },
4694 cx,
4695 )
4696 });
4697
4698 let buffer = open_buffer.await?;
4699 this.update(&mut cx, |this, cx| {
4700 Ok(proto::OpenBufferResponse {
4701 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4702 })
4703 })
4704 }
4705
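    // Converts a `ProjectTransaction` into its protobuf representation, serializing each
    // affected buffer for the given peer so that buffers the peer hasn't seen yet are sent in full.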
4706 fn serialize_project_transaction_for_peer(
4707 &mut self,
4708 project_transaction: ProjectTransaction,
4709 peer_id: PeerId,
4710 cx: &AppContext,
4711 ) -> proto::ProjectTransaction {
4712 let mut serialized_transaction = proto::ProjectTransaction {
4713 buffers: Default::default(),
4714 transactions: Default::default(),
4715 };
4716 for (buffer, transaction) in project_transaction.0 {
4717 serialized_transaction
4718 .buffers
4719 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4720 serialized_transaction
4721 .transactions
4722 .push(language::proto::serialize_transaction(&transaction));
4723 }
4724 serialized_transaction
4725 }
4726
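    // Reconstructs a `ProjectTransaction` received over the wire: resolves each serialized
    // buffer, waits for the transaction's edits to arrive, and optionally pushes each
    // transaction onto its buffer's undo history.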
4727 fn deserialize_project_transaction(
4728 &mut self,
4729 message: proto::ProjectTransaction,
4730 push_to_history: bool,
4731 cx: &mut ModelContext<Self>,
4732 ) -> Task<Result<ProjectTransaction>> {
4733 cx.spawn(|this, mut cx| async move {
4734 let mut project_transaction = ProjectTransaction::default();
4735 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4736 let buffer = this
4737 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4738 .await?;
4739 let transaction = language::proto::deserialize_transaction(transaction)?;
4740 project_transaction.0.insert(buffer, transaction);
4741 }
4742
4743 for (buffer, transaction) in &project_transaction.0 {
4744 buffer
4745 .update(&mut cx, |buffer, _| {
4746 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4747 })
4748 .await;
4749
4750 if push_to_history {
4751 buffer.update(&mut cx, |buffer, _| {
4752 buffer.push_transaction(transaction.clone(), Instant::now());
4753 });
4754 }
4755 }
4756
4757 Ok(project_transaction)
4758 })
4759 }
4760
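    // The first time a buffer is shared with a peer, its full state is sent; on subsequent
    // references, only its id is sent.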
4761 fn serialize_buffer_for_peer(
4762 &mut self,
4763 buffer: &ModelHandle<Buffer>,
4764 peer_id: PeerId,
4765 cx: &AppContext,
4766 ) -> proto::Buffer {
4767 let buffer_id = buffer.read(cx).remote_id();
4768 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4769 if shared_buffers.insert(buffer_id) {
4770 proto::Buffer {
4771 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4772 }
4773 } else {
4774 proto::Buffer {
4775 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4776 }
4777 }
4778 }
4779
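    // Resolves a serialized buffer. An id-only variant waits until the corresponding buffer
    // has been opened locally; a full-state variant reconstructs the buffer (and its file)
    // and registers it with the project.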
4780 fn deserialize_buffer(
4781 &mut self,
4782 buffer: proto::Buffer,
4783 cx: &mut ModelContext<Self>,
4784 ) -> Task<Result<ModelHandle<Buffer>>> {
4785 let replica_id = self.replica_id();
4786
4787 let opened_buffer_tx = self.opened_buffer.0.clone();
4788 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4789 cx.spawn(|this, mut cx| async move {
4790 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4791 proto::buffer::Variant::Id(id) => {
4792 let buffer = loop {
4793 let buffer = this.read_with(&cx, |this, cx| {
4794 this.opened_buffers
4795 .get(&id)
4796 .and_then(|buffer| buffer.upgrade(cx))
4797 });
4798 if let Some(buffer) = buffer {
4799 break buffer;
4800 }
4801 opened_buffer_rx
4802 .next()
4803 .await
4804 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4805 };
4806 Ok(buffer)
4807 }
4808 proto::buffer::Variant::State(mut buffer) => {
4809 let mut buffer_worktree = None;
4810 let mut buffer_file = None;
4811 if let Some(file) = buffer.file.take() {
4812 this.read_with(&cx, |this, cx| {
4813 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4814 let worktree =
4815 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4816 anyhow!("no worktree found for id {}", file.worktree_id)
4817 })?;
4818 buffer_file =
4819 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4820 as Box<dyn language::File>);
4821 buffer_worktree = Some(worktree);
4822 Ok::<_, anyhow::Error>(())
4823 })?;
4824 }
4825
4826 let buffer = cx.add_model(|cx| {
4827 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4828 });
4829
4830 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4831
4832 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4833 Ok(buffer)
4834 }
4835 }
4836 })
4837 }
4838
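    // Reconstructs a `Symbol` from its protobuf form, selecting a language for the path so the
    // symbol's label can be syntax highlighted where possible.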
4839 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4840 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4841 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4842 let start = serialized_symbol
4843 .start
4844 .ok_or_else(|| anyhow!("invalid start"))?;
4845 let end = serialized_symbol
4846 .end
4847 .ok_or_else(|| anyhow!("invalid end"))?;
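        // The kind is transmitted over the wire as the raw `lsp::SymbolKind` value.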
4848 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4849 let path = PathBuf::from(serialized_symbol.path);
4850 let language = self.languages.select_language(&path);
4851 Ok(Symbol {
4852 source_worktree_id,
4853 worktree_id,
4854 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4855 label: language
4856 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4857 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4858 name: serialized_symbol.name,
4859 path,
4860 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4861 kind,
4862 signature: serialized_symbol
4863 .signature
4864 .try_into()
4865 .map_err(|_| anyhow!("invalid signature"))?,
4866 })
4867 }
4868
4869 async fn handle_buffer_saved(
4870 this: ModelHandle<Self>,
4871 envelope: TypedEnvelope<proto::BufferSaved>,
4872 _: Arc<Client>,
4873 mut cx: AsyncAppContext,
4874 ) -> Result<()> {
4875 let version = deserialize_version(envelope.payload.version);
4876 let mtime = envelope
4877 .payload
4878 .mtime
4879 .ok_or_else(|| anyhow!("missing mtime"))?
4880 .into();
4881
4882 this.update(&mut cx, |this, cx| {
4883 let buffer = this
4884 .opened_buffers
4885 .get(&envelope.payload.buffer_id)
4886 .and_then(|buffer| buffer.upgrade(cx));
4887 if let Some(buffer) = buffer {
4888 buffer.update(cx, |buffer, cx| {
4889 buffer.did_save(version, mtime, None, cx);
4890 });
4891 }
4892 Ok(())
4893 })
4894 }
4895
4896 async fn handle_buffer_reloaded(
4897 this: ModelHandle<Self>,
4898 envelope: TypedEnvelope<proto::BufferReloaded>,
4899 _: Arc<Client>,
4900 mut cx: AsyncAppContext,
4901 ) -> Result<()> {
4902 let payload = envelope.payload.clone();
4903 let version = deserialize_version(payload.version);
4904 let mtime = payload
4905 .mtime
4906 .ok_or_else(|| anyhow!("missing mtime"))?
4907 .into();
4908 this.update(&mut cx, |this, cx| {
4909 let buffer = this
4910 .opened_buffers
4911 .get(&payload.buffer_id)
4912 .and_then(|buffer| buffer.upgrade(cx));
4913 if let Some(buffer) = buffer {
4914 buffer.update(cx, |buffer, cx| {
4915 buffer.did_reload(version, mtime, cx);
4916 });
4917 }
4918 Ok(())
4919 })
4920 }
4921
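    // Fuzzy-matches `query` against the paths of all visible worktrees, running the matching
    // on the background executor.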
4922 pub fn match_paths<'a>(
4923 &self,
4924 query: &'a str,
4925 include_ignored: bool,
4926 smart_case: bool,
4927 max_results: usize,
4928 cancel_flag: &'a AtomicBool,
4929 cx: &AppContext,
4930 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4931 let worktrees = self
4932 .worktrees(cx)
4933 .filter(|worktree| worktree.read(cx).is_visible())
4934 .collect::<Vec<_>>();
4935 let include_root_name = worktrees.len() > 1;
4936 let candidate_sets = worktrees
4937 .into_iter()
4938 .map(|worktree| CandidateSet {
4939 snapshot: worktree.read(cx).snapshot(),
4940 include_ignored,
4941 include_root_name,
4942 })
4943 .collect::<Vec<_>>();
4944
4945 let background = cx.background().clone();
4946 async move {
4947 fuzzy::match_paths(
4948 candidate_sets.as_slice(),
4949 query,
4950 smart_case,
4951 max_results,
4952 cancel_flag,
4953 background,
4954 )
4955 .await
4956 }
4957 }
4958
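    // Converts edits received from a language server (expressed against a particular document
    // version) into anchor ranges in the buffer. Adjacent edits are merged, and multi-line
    // edits are diffed so that anchors in unchanged regions keep their positions.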
4959 fn edits_from_lsp(
4960 &mut self,
4961 buffer: &ModelHandle<Buffer>,
4962 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4963 version: Option<i32>,
4964 cx: &mut ModelContext<Self>,
4965 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4966 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4967 cx.background().spawn(async move {
4968 let snapshot = snapshot?;
4969 let mut lsp_edits = lsp_edits
4970 .into_iter()
4971 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4972 .peekable();
4973
4974 let mut edits = Vec::new();
4975 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4976 // Combine any LSP edits that are adjacent.
4977 //
4978 // Also, combine LSP edits that are separated from each other by only
4979 // a newline. This is important because for some code actions,
4980 // Rust-analyzer rewrites the entire buffer via a series of edits that
4981 // are separated by unchanged newline characters.
4982 //
4983 // In order for the diffing logic below to work properly, any edits that
4984 // cancel each other out must be combined into one.
4985 while let Some((next_range, next_text)) = lsp_edits.peek() {
4986 if next_range.start > range.end {
4987 if next_range.start.row > range.end.row + 1
4988 || next_range.start.column > 0
4989 || snapshot.clip_point_utf16(
4990 PointUtf16::new(range.end.row, u32::MAX),
4991 Bias::Left,
4992 ) > range.end
4993 {
4994 break;
4995 }
4996 new_text.push('\n');
4997 }
4998 range.end = next_range.end;
4999 new_text.push_str(&next_text);
5000 lsp_edits.next();
5001 }
5002
5003 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5004 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5005 {
5006 return Err(anyhow!("invalid edits received from language server"));
5007 }
5008
5009 // For multiline edits, perform a diff of the old and new text so that
5010 // we can identify the changes more precisely, preserving the locations
5011 // of any anchors positioned in the unchanged regions.
5012 if range.end.row > range.start.row {
5013 let mut offset = range.start.to_offset(&snapshot);
5014 let old_text = snapshot.text_for_range(range).collect::<String>();
5015
5016 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5017 let mut moved_since_edit = true;
5018 for change in diff.iter_all_changes() {
5019 let tag = change.tag();
5020 let value = change.value();
5021 match tag {
5022 ChangeTag::Equal => {
5023 offset += value.len();
5024 moved_since_edit = true;
5025 }
5026 ChangeTag::Delete => {
5027 let start = snapshot.anchor_after(offset);
5028 let end = snapshot.anchor_before(offset + value.len());
5029 if moved_since_edit {
5030 edits.push((start..end, String::new()));
5031 } else {
5032 edits.last_mut().unwrap().0.end = end;
5033 }
5034 offset += value.len();
5035 moved_since_edit = false;
5036 }
5037 ChangeTag::Insert => {
5038 if moved_since_edit {
5039 let anchor = snapshot.anchor_after(offset);
5040 edits.push((anchor.clone()..anchor, value.to_string()));
5041 } else {
5042 edits.last_mut().unwrap().1.push_str(value);
5043 }
5044 moved_since_edit = false;
5045 }
5046 }
5047 }
5048 } else if range.end == range.start {
5049 let anchor = snapshot.anchor_after(range.start);
5050 edits.push((anchor.clone()..anchor, new_text));
5051 } else {
5052 let edit_start = snapshot.anchor_after(range.start);
5053 let edit_end = snapshot.anchor_before(range.end);
5054 edits.push((edit_start..edit_end, new_text));
5055 }
5056 }
5057
5058 Ok(edits)
5059 })
5060 }
5061
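    // Returns the buffer snapshot corresponding to the given LSP document version, pruning
    // snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than it. With no version,
    // returns the buffer's current text snapshot.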
5062 fn buffer_snapshot_for_lsp_version(
5063 &mut self,
5064 buffer: &ModelHandle<Buffer>,
5065 version: Option<i32>,
5066 cx: &AppContext,
5067 ) -> Result<TextBufferSnapshot> {
5068 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5069
5070 if let Some(version) = version {
5071 let buffer_id = buffer.read(cx).remote_id();
5072 let snapshots = self
5073 .buffer_snapshots
5074 .get_mut(&buffer_id)
5075 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5076 let mut found_snapshot = None;
5077 snapshots.retain(|(snapshot_version, snapshot)| {
5078 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5079 false
5080 } else {
5081 if *snapshot_version == version {
5082 found_snapshot = Some(snapshot.clone());
5083 }
5084 true
5085 }
5086 });
5087
5088 found_snapshot.ok_or_else(|| {
5089 anyhow!(
5090 "snapshot not found for buffer {} at version {}",
5091 buffer_id,
5092 version
5093 )
5094 })
5095 } else {
5096 Ok((buffer.read(cx)).text_snapshot())
5097 }
5098 }
5099
5100 fn language_server_for_buffer(
5101 &self,
5102 buffer: &Buffer,
5103 cx: &AppContext,
5104 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5105 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5106 let worktree_id = file.worktree_id(cx);
5107 self.language_servers
5108 .get(&(worktree_id, language.lsp_adapter()?.name()))
5109 } else {
5110 None
5111 }
5112 }
5113}
5114
5115impl WorktreeHandle {
5116 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5117 match self {
5118 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5119 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5120 }
5121 }
5122}
5123
5124impl OpenBuffer {
5125 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5126 match self {
5127 OpenBuffer::Strong(handle) => Some(handle.clone()),
5128 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5129 OpenBuffer::Loading(_) => None,
5130 }
5131 }
5132}
5133
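// Adapts a worktree snapshot into a candidate set for fuzzy path matching.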
5134struct CandidateSet {
5135 snapshot: Snapshot,
5136 include_ignored: bool,
5137 include_root_name: bool,
5138}
5139
5140impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5141 type Candidates = CandidateSetIter<'a>;
5142
5143 fn id(&self) -> usize {
5144 self.snapshot.id().to_usize()
5145 }
5146
5147 fn len(&self) -> usize {
5148 if self.include_ignored {
5149 self.snapshot.file_count()
5150 } else {
5151 self.snapshot.visible_file_count()
5152 }
5153 }
5154
5155 fn prefix(&self) -> Arc<str> {
5156 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5157 self.snapshot.root_name().into()
5158 } else if self.include_root_name {
5159 format!("{}/", self.snapshot.root_name()).into()
5160 } else {
5161 "".into()
5162 }
5163 }
5164
5165 fn candidates(&'a self, start: usize) -> Self::Candidates {
5166 CandidateSetIter {
5167 traversal: self.snapshot.files(self.include_ignored, start),
5168 }
5169 }
5170}
5171
5172struct CandidateSetIter<'a> {
5173 traversal: Traversal<'a>,
5174}
5175
5176impl<'a> Iterator for CandidateSetIter<'a> {
5177 type Item = PathMatchCandidate<'a>;
5178
5179 fn next(&mut self) -> Option<Self::Item> {
5180 self.traversal.next().map(|entry| {
5181 if let EntryKind::File(char_bag) = entry.kind {
5182 PathMatchCandidate {
5183 path: &entry.path,
5184 char_bag,
5185 }
5186 } else {
5187 unreachable!()
5188 }
5189 })
5190 }
5191}
5192
5193impl Entity for Project {
5194 type Event = Event;
5195
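    // When the project is released, notify the server: unregister the project if we are
    // hosting it, or leave it if we joined someone else's.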
5196 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5197 match &self.client_state {
5198 ProjectClientState::Local { remote_id_rx, .. } => {
5199 if let Some(project_id) = *remote_id_rx.borrow() {
5200 self.client
5201 .send(proto::UnregisterProject { project_id })
5202 .log_err();
5203 }
5204 }
5205 ProjectClientState::Remote { remote_id, .. } => {
5206 self.client
5207 .send(proto::LeaveProject {
5208 project_id: *remote_id,
5209 })
5210 .log_err();
5211 }
5212 }
5213 }
5214
5215 fn app_will_quit(
5216 &mut self,
5217 _: &mut MutableAppContext,
5218 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5219 let shutdown_futures = self
5220 .language_servers
5221 .drain()
5222 .filter_map(|(_, (_, server))| server.shutdown())
5223 .collect::<Vec<_>>();
5224 Some(
5225 async move {
5226 futures::future::join_all(shutdown_futures).await;
5227 }
5228 .boxed(),
5229 )
5230 }
5231}
5232
5233impl Collaborator {
5234 fn from_proto(
5235 message: proto::Collaborator,
5236 user_store: &ModelHandle<UserStore>,
5237 cx: &mut AsyncAppContext,
5238 ) -> impl Future<Output = Result<Self>> {
5239 let user = user_store.update(cx, |user_store, cx| {
5240 user_store.fetch_user(message.user_id, cx)
5241 });
5242
5243 async move {
5244 Ok(Self {
5245 peer_id: PeerId(message.peer_id),
5246 user: user.await?,
5247 replica_id: message.replica_id as ReplicaId,
5248 })
5249 }
5250 }
5251}
5252
5253impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5254 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5255 Self {
5256 worktree_id,
5257 path: path.as_ref().into(),
5258 }
5259 }
5260}
5261
5262impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5263 fn from(options: lsp::CreateFileOptions) -> Self {
5264 Self {
5265 overwrite: options.overwrite.unwrap_or(false),
5266 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5267 }
5268 }
5269}
5270
5271impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5272 fn from(options: lsp::RenameFileOptions) -> Self {
5273 Self {
5274 overwrite: options.overwrite.unwrap_or(false),
5275 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5276 }
5277 }
5278}
5279
5280impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5281 fn from(options: lsp::DeleteFileOptions) -> Self {
5282 Self {
5283 recursive: options.recursive.unwrap_or(false),
5284 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5285 }
5286 }
5287}
5288
5289fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5290 proto::Symbol {
5291 source_worktree_id: symbol.source_worktree_id.to_proto(),
5292 worktree_id: symbol.worktree_id.to_proto(),
5293 language_server_name: symbol.language_server_name.0.to_string(),
5294 name: symbol.name.clone(),
5295 kind: unsafe { mem::transmute(symbol.kind) },
5296 path: symbol.path.to_string_lossy().to_string(),
5297 start: Some(proto::Point {
5298 row: symbol.range.start.row,
5299 column: symbol.range.start.column,
5300 }),
5301 end: Some(proto::Point {
5302 row: symbol.range.end.row,
5303 column: symbol.range.end.column,
5304 }),
5305 signature: symbol.signature.to_vec(),
5306 }
5307}
5308
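// Computes `path` relative to `base`, inserting `..` components where the paths diverge.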
5309fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5310 let mut path_components = path.components();
5311 let mut base_components = base.components();
5312 let mut components: Vec<Component> = Vec::new();
5313 loop {
5314 match (path_components.next(), base_components.next()) {
5315 (None, None) => break,
5316 (Some(a), None) => {
5317 components.push(a);
5318 components.extend(path_components.by_ref());
5319 break;
5320 }
5321 (None, _) => components.push(Component::ParentDir),
5322 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5323 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5324 (Some(a), Some(_)) => {
5325 components.push(Component::ParentDir);
5326 for _ in base_components {
5327 components.push(Component::ParentDir);
5328 }
5329 components.push(a);
5330 components.extend(path_components.by_ref());
5331 break;
5332 }
5333 }
5334 }
5335 components.iter().map(|c| c.as_os_str()).collect()
5336}
5337
5338impl Item for Buffer {
5339 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5340 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5341 }
5342}
5343
5344#[cfg(test)]
5345mod tests {
5346 use crate::worktree::WorktreeHandle;
5347
5348 use super::{Event, *};
5349 use fs::RealFs;
5350 use futures::{future, StreamExt};
5351 use gpui::test::subscribe;
5352 use language::{
5353 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5354 OffsetRangeExt, Point, ToPoint,
5355 };
5356 use lsp::Url;
5357 use serde_json::json;
5358 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5359 use unindent::Unindent as _;
5360 use util::{assert_set_eq, test::temp_tree};
5361
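    // Verifies that a worktree can be populated through symlinks and that fuzzy path matching
    // over it returns the expected entries.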
5362 #[gpui::test]
5363 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5364 let dir = temp_tree(json!({
5365 "root": {
5366 "apple": "",
5367 "banana": {
5368 "carrot": {
5369 "date": "",
5370 "endive": "",
5371 }
5372 },
5373 "fennel": {
5374 "grape": "",
5375 }
5376 }
5377 }));
5378
5379 let root_link_path = dir.path().join("root_link");
5380 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5381 unix::fs::symlink(
5382 &dir.path().join("root/fennel"),
5383 &dir.path().join("root/finnochio"),
5384 )
5385 .unwrap();
5386
5387 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5388
5389 project.read_with(cx, |project, cx| {
5390 let tree = project.worktrees(cx).next().unwrap().read(cx);
5391 assert_eq!(tree.file_count(), 5);
5392 assert_eq!(
5393 tree.inode_for_path("fennel/grape"),
5394 tree.inode_for_path("finnochio/grape")
5395 );
5396 });
5397
5398 let cancel_flag = Default::default();
5399 let results = project
5400 .read_with(cx, |project, cx| {
5401 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5402 })
5403 .await;
5404 assert_eq!(
5405 results
5406 .into_iter()
5407 .map(|result| result.path)
5408 .collect::<Vec<Arc<Path>>>(),
5409 vec![
5410 PathBuf::from("banana/carrot/date").into(),
5411 PathBuf::from("banana/carrot/endive").into(),
5412 ]
5413 );
5414 }
5415
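    // Exercises how language servers are started, notified about buffer opens, edits, saves,
    // and renames, and restarted for the buffers they serve.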
5416 #[gpui::test]
5417 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5418 cx.foreground().forbid_parking();
5419
5420 let mut rust_language = Language::new(
5421 LanguageConfig {
5422 name: "Rust".into(),
5423 path_suffixes: vec!["rs".to_string()],
5424 ..Default::default()
5425 },
5426 Some(tree_sitter_rust::language()),
5427 );
5428 let mut json_language = Language::new(
5429 LanguageConfig {
5430 name: "JSON".into(),
5431 path_suffixes: vec!["json".to_string()],
5432 ..Default::default()
5433 },
5434 None,
5435 );
5436 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5437 name: "the-rust-language-server",
5438 capabilities: lsp::ServerCapabilities {
5439 completion_provider: Some(lsp::CompletionOptions {
5440 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5441 ..Default::default()
5442 }),
5443 ..Default::default()
5444 },
5445 ..Default::default()
5446 });
5447 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5448 name: "the-json-language-server",
5449 capabilities: lsp::ServerCapabilities {
5450 completion_provider: Some(lsp::CompletionOptions {
5451 trigger_characters: Some(vec![":".to_string()]),
5452 ..Default::default()
5453 }),
5454 ..Default::default()
5455 },
5456 ..Default::default()
5457 });
5458
5459 let fs = FakeFs::new(cx.background());
5460 fs.insert_tree(
5461 "/the-root",
5462 json!({
5463 "test.rs": "const A: i32 = 1;",
5464 "test2.rs": "",
5465 "Cargo.toml": "a = 1",
5466 "package.json": "{\"a\": 1}",
5467 }),
5468 )
5469 .await;
5470
5471 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5472 project.update(cx, |project, _| {
5473 project.languages.add(Arc::new(rust_language));
5474 project.languages.add(Arc::new(json_language));
5475 });
5476
5477 // Open a buffer without an associated language server.
5478 let toml_buffer = project
5479 .update(cx, |project, cx| {
5480 project.open_local_buffer("/the-root/Cargo.toml", cx)
5481 })
5482 .await
5483 .unwrap();
5484
5485 // Open a buffer with an associated language server.
5486 let rust_buffer = project
5487 .update(cx, |project, cx| {
5488 project.open_local_buffer("/the-root/test.rs", cx)
5489 })
5490 .await
5491 .unwrap();
5492
5493 // A server is started up, and it is notified about Rust files.
5494 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5495 assert_eq!(
5496 fake_rust_server
5497 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5498 .await
5499 .text_document,
5500 lsp::TextDocumentItem {
5501 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5502 version: 0,
5503 text: "const A: i32 = 1;".to_string(),
5504 language_id: Default::default()
5505 }
5506 );
5507
5508 // The buffer is configured based on the language server's capabilities.
5509 rust_buffer.read_with(cx, |buffer, _| {
5510 assert_eq!(
5511 buffer.completion_triggers(),
5512 &[".".to_string(), "::".to_string()]
5513 );
5514 });
5515 toml_buffer.read_with(cx, |buffer, _| {
5516 assert!(buffer.completion_triggers().is_empty());
5517 });
5518
5519 // Edit a buffer. The changes are reported to the language server.
5520 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5521 assert_eq!(
5522 fake_rust_server
5523 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5524 .await
5525 .text_document,
5526 lsp::VersionedTextDocumentIdentifier::new(
5527 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5528 1
5529 )
5530 );
5531
5532 // Open a third buffer with a different associated language server.
5533 let json_buffer = project
5534 .update(cx, |project, cx| {
5535 project.open_local_buffer("/the-root/package.json", cx)
5536 })
5537 .await
5538 .unwrap();
5539
        // A JSON language server is started up, and it is notified only about the JSON buffer.
5541 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5542 assert_eq!(
5543 fake_json_server
5544 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5545 .await
5546 .text_document,
5547 lsp::TextDocumentItem {
5548 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5549 version: 0,
5550 text: "{\"a\": 1}".to_string(),
5551 language_id: Default::default()
5552 }
5553 );
5554
5555 // This buffer is configured based on the second language server's
5556 // capabilities.
5557 json_buffer.read_with(cx, |buffer, _| {
5558 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5559 });
5560
5561 // When opening another buffer whose language server is already running,
5562 // it is also configured based on the existing language server's capabilities.
5563 let rust_buffer2 = project
5564 .update(cx, |project, cx| {
5565 project.open_local_buffer("/the-root/test2.rs", cx)
5566 })
5567 .await
5568 .unwrap();
5569 rust_buffer2.read_with(cx, |buffer, _| {
5570 assert_eq!(
5571 buffer.completion_triggers(),
5572 &[".".to_string(), "::".to_string()]
5573 );
5574 });
5575
5576 // Changes are reported only to servers matching the buffer's language.
5577 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5578 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5579 assert_eq!(
5580 fake_rust_server
5581 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5582 .await
5583 .text_document,
5584 lsp::VersionedTextDocumentIdentifier::new(
5585 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5586 1
5587 )
5588 );
5589
5590 // Save notifications are reported to all servers.
5591 toml_buffer
5592 .update(cx, |buffer, cx| buffer.save(cx))
5593 .await
5594 .unwrap();
5595 assert_eq!(
5596 fake_rust_server
5597 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5598 .await
5599 .text_document,
5600 lsp::TextDocumentIdentifier::new(
5601 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5602 )
5603 );
5604 assert_eq!(
5605 fake_json_server
5606 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5607 .await
5608 .text_document,
5609 lsp::TextDocumentIdentifier::new(
5610 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5611 )
5612 );
5613
5614 // Renames are reported only to servers matching the buffer's language.
5615 fs.rename(
5616 Path::new("/the-root/test2.rs"),
5617 Path::new("/the-root/test3.rs"),
5618 Default::default(),
5619 )
5620 .await
5621 .unwrap();
5622 assert_eq!(
5623 fake_rust_server
5624 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5625 .await
5626 .text_document,
5627 lsp::TextDocumentIdentifier::new(
5628 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5629 ),
5630 );
5631 assert_eq!(
5632 fake_rust_server
5633 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5634 .await
5635 .text_document,
5636 lsp::TextDocumentItem {
5637 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5638 version: 0,
5639 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5640 language_id: Default::default()
5641 },
5642 );
5643
5644 rust_buffer2.update(cx, |buffer, cx| {
5645 buffer.update_diagnostics(
5646 DiagnosticSet::from_sorted_entries(
5647 vec![DiagnosticEntry {
5648 diagnostic: Default::default(),
5649 range: Anchor::MIN..Anchor::MAX,
5650 }],
5651 &buffer.snapshot(),
5652 ),
5653 cx,
5654 );
5655 assert_eq!(
5656 buffer
5657 .snapshot()
5658 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5659 .count(),
5660 1
5661 );
5662 });
5663
5664 // When the rename changes the extension of the file, the buffer gets closed on the old
5665 // language server and gets opened on the new one.
5666 fs.rename(
5667 Path::new("/the-root/test3.rs"),
5668 Path::new("/the-root/test3.json"),
5669 Default::default(),
5670 )
5671 .await
5672 .unwrap();
5673 assert_eq!(
5674 fake_rust_server
5675 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5676 .await
5677 .text_document,
5678 lsp::TextDocumentIdentifier::new(
5679 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5680 ),
5681 );
5682 assert_eq!(
5683 fake_json_server
5684 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5685 .await
5686 .text_document,
5687 lsp::TextDocumentItem {
5688 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5689 version: 0,
5690 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5691 language_id: Default::default()
5692 },
5693 );
5694
5695 // We clear the diagnostics, since the language has changed.
5696 rust_buffer2.read_with(cx, |buffer, _| {
5697 assert_eq!(
5698 buffer
5699 .snapshot()
5700 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5701 .count(),
5702 0
5703 );
5704 });
5705
5706        // The renamed file's version resets after its language server changes.
5707 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5708 assert_eq!(
5709 fake_json_server
5710 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5711 .await
5712 .text_document,
5713 lsp::VersionedTextDocumentIdentifier::new(
5714 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5715 1
5716 )
5717 );
5718
5719 // Restart language servers
5720 project.update(cx, |project, cx| {
5721 project.restart_language_servers_for_buffers(
5722 vec![rust_buffer.clone(), json_buffer.clone()],
5723 cx,
5724 );
5725 });
5726
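        // Both the Rust and JSON language servers receive shutdown requests as part of the restart.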
5727 let mut rust_shutdown_requests = fake_rust_server
5728 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5729 let mut json_shutdown_requests = fake_json_server
5730 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5731 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5732
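        // New instances of both language servers are spawned to replace the ones that were shut down.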
5733 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5734 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5735
5736        // Ensure the Rust document is reopened in the new Rust language server
5737 assert_eq!(
5738 fake_rust_server
5739 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5740 .await
5741 .text_document,
5742 lsp::TextDocumentItem {
5743 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5744 version: 1,
5745 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5746 language_id: Default::default()
5747 }
5748 );
5749
5750        // Ensure the JSON documents are reopened in the new JSON language server
5751 assert_set_eq!(
5752 [
5753 fake_json_server
5754 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5755 .await
5756 .text_document,
5757 fake_json_server
5758 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5759 .await
5760 .text_document,
5761 ],
5762 [
5763 lsp::TextDocumentItem {
5764 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5765 version: 0,
5766 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5767 language_id: Default::default()
5768 },
5769 lsp::TextDocumentItem {
5770 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5771 version: 1,
5772 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5773 language_id: Default::default()
5774 }
5775 ]
5776 );
5777
5778 // Close notifications are reported only to servers matching the buffer's language.
5779 cx.update(|_| drop(json_buffer));
5780 let close_message = lsp::DidCloseTextDocumentParams {
5781 text_document: lsp::TextDocumentIdentifier::new(
5782 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5783 ),
5784 };
5785 assert_eq!(
5786 fake_json_server
5787 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5788 .await,
5789 close_message,
5790 );
5791 }
5792
5793 #[gpui::test]
5794 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5795 cx.foreground().forbid_parking();
5796
5797 let fs = FakeFs::new(cx.background());
5798 fs.insert_tree(
5799 "/dir",
5800 json!({
5801 "a.rs": "let a = 1;",
5802 "b.rs": "let b = 2;"
5803 }),
5804 )
5805 .await;
5806
5807 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5808
5809 let buffer_a = project
5810 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5811 .await
5812 .unwrap();
5813 let buffer_b = project
5814 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5815 .await
5816 .unwrap();
5817
5818 project.update(cx, |project, cx| {
5819 project
5820 .update_diagnostics(
5821 lsp::PublishDiagnosticsParams {
5822 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5823 version: None,
5824 diagnostics: vec![lsp::Diagnostic {
5825 range: lsp::Range::new(
5826 lsp::Position::new(0, 4),
5827 lsp::Position::new(0, 5),
5828 ),
5829 severity: Some(lsp::DiagnosticSeverity::ERROR),
5830 message: "error 1".to_string(),
5831 ..Default::default()
5832 }],
5833 },
5834 &[],
5835 cx,
5836 )
5837 .unwrap();
5838 project
5839 .update_diagnostics(
5840 lsp::PublishDiagnosticsParams {
5841 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5842 version: None,
5843 diagnostics: vec![lsp::Diagnostic {
5844 range: lsp::Range::new(
5845 lsp::Position::new(0, 4),
5846 lsp::Position::new(0, 5),
5847 ),
5848 severity: Some(lsp::DiagnosticSeverity::WARNING),
5849 message: "error 2".to_string(),
5850 ..Default::default()
5851 }],
5852 },
5853 &[],
5854 cx,
5855 )
5856 .unwrap();
5857 });
5858
5859 buffer_a.read_with(cx, |buffer, _| {
5860 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5861 assert_eq!(
5862 chunks
5863 .iter()
5864 .map(|(s, d)| (s.as_str(), *d))
5865 .collect::<Vec<_>>(),
5866 &[
5867 ("let ", None),
5868 ("a", Some(DiagnosticSeverity::ERROR)),
5869 (" = 1;", None),
5870 ]
5871 );
5872 });
5873 buffer_b.read_with(cx, |buffer, _| {
5874 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5875 assert_eq!(
5876 chunks
5877 .iter()
5878 .map(|(s, d)| (s.as_str(), *d))
5879 .collect::<Vec<_>>(),
5880 &[
5881 ("let ", None),
5882 ("b", Some(DiagnosticSeverity::WARNING)),
5883 (" = 2;", None),
5884 ]
5885 );
5886 });
5887 }
5888
5889 #[gpui::test]
5890 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5891 cx.foreground().forbid_parking();
5892
5893 let progress_token = "the-progress-token";
5894 let mut language = Language::new(
5895 LanguageConfig {
5896 name: "Rust".into(),
5897 path_suffixes: vec!["rs".to_string()],
5898 ..Default::default()
5899 },
5900 Some(tree_sitter_rust::language()),
5901 );
5902 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5903 disk_based_diagnostics_progress_token: Some(progress_token),
5904 disk_based_diagnostics_sources: &["disk"],
5905 ..Default::default()
5906 });
5907
5908 let fs = FakeFs::new(cx.background());
5909 fs.insert_tree(
5910 "/dir",
5911 json!({
5912 "a.rs": "fn a() { A }",
5913 "b.rs": "const y: i32 = 1",
5914 }),
5915 )
5916 .await;
5917
5918 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5919 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5920 let worktree_id =
5921 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5922
5923        // Cause the worktree to start the fake language server
5924 let _buffer = project
5925 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5926 .await
5927 .unwrap();
5928
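        // Subscribe to project events so we can observe the disk-based diagnostics lifecycle.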
5929 let mut events = subscribe(&project, cx);
5930
5931 let mut fake_server = fake_servers.next().await.unwrap();
5932 fake_server.start_progress(progress_token).await;
5933 assert_eq!(
5934 events.next().await.unwrap(),
5935 Event::DiskBasedDiagnosticsStarted
5936 );
5937
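        // Additional progress on the same token does not emit another DiskBasedDiagnosticsStarted
        // event; the work is only considered finished once every outstanding progress ends.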
5938 fake_server.start_progress(progress_token).await;
5939 fake_server.end_progress(progress_token).await;
5940 fake_server.start_progress(progress_token).await;
5941
5942 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5943 lsp::PublishDiagnosticsParams {
5944 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5945 version: None,
5946 diagnostics: vec![lsp::Diagnostic {
5947 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5948 severity: Some(lsp::DiagnosticSeverity::ERROR),
5949 message: "undefined variable 'A'".to_string(),
5950 ..Default::default()
5951 }],
5952 },
5953 );
5954 assert_eq!(
5955 events.next().await.unwrap(),
5956 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5957 );
5958
5959 fake_server.end_progress(progress_token).await;
5960 fake_server.end_progress(progress_token).await;
5961 assert_eq!(
5962 events.next().await.unwrap(),
5963 Event::DiskBasedDiagnosticsUpdated
5964 );
5965 assert_eq!(
5966 events.next().await.unwrap(),
5967 Event::DiskBasedDiagnosticsFinished
5968 );
5969
5970 let buffer = project
5971 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5972 .await
5973 .unwrap();
5974
5975 buffer.read_with(cx, |buffer, _| {
5976 let snapshot = buffer.snapshot();
5977 let diagnostics = snapshot
5978 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5979 .collect::<Vec<_>>();
5980 assert_eq!(
5981 diagnostics,
5982 &[DiagnosticEntry {
5983 range: Point::new(0, 9)..Point::new(0, 10),
5984 diagnostic: Diagnostic {
5985 severity: lsp::DiagnosticSeverity::ERROR,
5986 message: "undefined variable 'A'".to_string(),
5987 group_id: 0,
5988 is_primary: true,
5989 ..Default::default()
5990 }
5991 }]
5992 )
5993 });
5994
5995 // Ensure publishing empty diagnostics twice only results in one update event.
5996 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5997 lsp::PublishDiagnosticsParams {
5998 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5999 version: None,
6000 diagnostics: Default::default(),
6001 },
6002 );
6003 assert_eq!(
6004 events.next().await.unwrap(),
6005 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6006 );
6007
6008 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6009 lsp::PublishDiagnosticsParams {
6010 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6011 version: None,
6012 diagnostics: Default::default(),
6013 },
6014 );
6015 cx.foreground().run_until_parked();
6016 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6017 }
6018
6019 #[gpui::test]
6020 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6021 cx.foreground().forbid_parking();
6022
6023 let progress_token = "the-progress-token";
6024 let mut language = Language::new(
6025 LanguageConfig {
6026 path_suffixes: vec!["rs".to_string()],
6027 ..Default::default()
6028 },
6029 None,
6030 );
6031 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6032 disk_based_diagnostics_sources: &["disk"],
6033 disk_based_diagnostics_progress_token: Some(progress_token),
6034 ..Default::default()
6035 });
6036
6037 let fs = FakeFs::new(cx.background());
6038 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6039
6040 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6041 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6042
6043 let buffer = project
6044 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6045 .await
6046 .unwrap();
6047
6048 // Simulate diagnostics starting to update.
6049 let mut fake_server = fake_servers.next().await.unwrap();
6050 fake_server.start_progress(progress_token).await;
6051
6052 // Restart the server before the diagnostics finish updating.
6053 project.update(cx, |project, cx| {
6054 project.restart_language_servers_for_buffers([buffer], cx);
6055 });
6056 let mut events = subscribe(&project, cx);
6057
6058 // Simulate the newly started server sending more diagnostics.
6059 let mut fake_server = fake_servers.next().await.unwrap();
6060 fake_server.start_progress(progress_token).await;
6061 assert_eq!(
6062 events.next().await.unwrap(),
6063 Event::DiskBasedDiagnosticsStarted
6064 );
6065
6066 // All diagnostics are considered done, despite the old server's diagnostic
6067 // task never completing.
6068 fake_server.end_progress(progress_token).await;
6069 assert_eq!(
6070 events.next().await.unwrap(),
6071 Event::DiskBasedDiagnosticsUpdated
6072 );
6073 assert_eq!(
6074 events.next().await.unwrap(),
6075 Event::DiskBasedDiagnosticsFinished
6076 );
6077 project.read_with(cx, |project, _| {
6078 assert!(!project.is_running_disk_based_diagnostics());
6079 });
6080 }
6081
6082 #[gpui::test]
6083 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6084 cx.foreground().forbid_parking();
6085
6086 let mut language = Language::new(
6087 LanguageConfig {
6088 name: "Rust".into(),
6089 path_suffixes: vec!["rs".to_string()],
6090 ..Default::default()
6091 },
6092 Some(tree_sitter_rust::language()),
6093 );
6094 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6095 disk_based_diagnostics_sources: &["disk"],
6096 ..Default::default()
6097 });
6098
6099 let text = "
6100 fn a() { A }
6101 fn b() { BB }
6102 fn c() { CCC }
6103 "
6104 .unindent();
6105
6106 let fs = FakeFs::new(cx.background());
6107 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6108
6109 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6110 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6111
6112 let buffer = project
6113 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6114 .await
6115 .unwrap();
6116
6117 let mut fake_server = fake_servers.next().await.unwrap();
6118 let open_notification = fake_server
6119 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6120 .await;
6121
6122 // Edit the buffer, moving the content down
6123 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6124 let change_notification_1 = fake_server
6125 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6126 .await;
6127 assert!(
6128 change_notification_1.text_document.version > open_notification.text_document.version
6129 );
6130
6131 // Report some diagnostics for the initial version of the buffer
6132 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6133 lsp::PublishDiagnosticsParams {
6134 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6135 version: Some(open_notification.text_document.version),
6136 diagnostics: vec![
6137 lsp::Diagnostic {
6138 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6139 severity: Some(DiagnosticSeverity::ERROR),
6140 message: "undefined variable 'A'".to_string(),
6141 source: Some("disk".to_string()),
6142 ..Default::default()
6143 },
6144 lsp::Diagnostic {
6145 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6146 severity: Some(DiagnosticSeverity::ERROR),
6147 message: "undefined variable 'BB'".to_string(),
6148 source: Some("disk".to_string()),
6149 ..Default::default()
6150 },
6151 lsp::Diagnostic {
6152 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6153 severity: Some(DiagnosticSeverity::ERROR),
6154 source: Some("disk".to_string()),
6155 message: "undefined variable 'CCC'".to_string(),
6156 ..Default::default()
6157 },
6158 ],
6159 },
6160 );
6161
6162 // The diagnostics have moved down since they were created.
6163 buffer.next_notification(cx).await;
6164 buffer.read_with(cx, |buffer, _| {
6165 assert_eq!(
6166 buffer
6167 .snapshot()
6168 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6169 .collect::<Vec<_>>(),
6170 &[
6171 DiagnosticEntry {
6172 range: Point::new(3, 9)..Point::new(3, 11),
6173 diagnostic: Diagnostic {
6174 severity: DiagnosticSeverity::ERROR,
6175 message: "undefined variable 'BB'".to_string(),
6176 is_disk_based: true,
6177 group_id: 1,
6178 is_primary: true,
6179 ..Default::default()
6180 },
6181 },
6182 DiagnosticEntry {
6183 range: Point::new(4, 9)..Point::new(4, 12),
6184 diagnostic: Diagnostic {
6185 severity: DiagnosticSeverity::ERROR,
6186 message: "undefined variable 'CCC'".to_string(),
6187 is_disk_based: true,
6188 group_id: 2,
6189 is_primary: true,
6190 ..Default::default()
6191 }
6192 }
6193 ]
6194 );
6195 assert_eq!(
6196 chunks_with_diagnostics(buffer, 0..buffer.len()),
6197 [
6198 ("\n\nfn a() { ".to_string(), None),
6199 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6200 (" }\nfn b() { ".to_string(), None),
6201 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6202 (" }\nfn c() { ".to_string(), None),
6203 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6204 (" }\n".to_string(), None),
6205 ]
6206 );
6207 assert_eq!(
6208 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6209 [
6210 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6211 (" }\nfn c() { ".to_string(), None),
6212 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6213 ]
6214 );
6215 });
6216
6217 // Ensure overlapping diagnostics are highlighted correctly.
6218 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6219 lsp::PublishDiagnosticsParams {
6220 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6221 version: Some(open_notification.text_document.version),
6222 diagnostics: vec![
6223 lsp::Diagnostic {
6224 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6225 severity: Some(DiagnosticSeverity::ERROR),
6226 message: "undefined variable 'A'".to_string(),
6227 source: Some("disk".to_string()),
6228 ..Default::default()
6229 },
6230 lsp::Diagnostic {
6231 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6232 severity: Some(DiagnosticSeverity::WARNING),
6233 message: "unreachable statement".to_string(),
6234 source: Some("disk".to_string()),
6235 ..Default::default()
6236 },
6237 ],
6238 },
6239 );
6240
6241 buffer.next_notification(cx).await;
6242 buffer.read_with(cx, |buffer, _| {
6243 assert_eq!(
6244 buffer
6245 .snapshot()
6246 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6247 .collect::<Vec<_>>(),
6248 &[
6249 DiagnosticEntry {
6250 range: Point::new(2, 9)..Point::new(2, 12),
6251 diagnostic: Diagnostic {
6252 severity: DiagnosticSeverity::WARNING,
6253 message: "unreachable statement".to_string(),
6254 is_disk_based: true,
6255 group_id: 1,
6256 is_primary: true,
6257 ..Default::default()
6258 }
6259 },
6260 DiagnosticEntry {
6261 range: Point::new(2, 9)..Point::new(2, 10),
6262 diagnostic: Diagnostic {
6263 severity: DiagnosticSeverity::ERROR,
6264 message: "undefined variable 'A'".to_string(),
6265 is_disk_based: true,
6266 group_id: 0,
6267 is_primary: true,
6268 ..Default::default()
6269 },
6270 }
6271 ]
6272 );
6273 assert_eq!(
6274 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6275 [
6276 ("fn a() { ".to_string(), None),
6277 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6278 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6279 ("\n".to_string(), None),
6280 ]
6281 );
6282 assert_eq!(
6283 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6284 [
6285 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6286 ("\n".to_string(), None),
6287 ]
6288 );
6289 });
6290
6291 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6292 // changes since the last save.
6293 buffer.update(cx, |buffer, cx| {
6294 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6295 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6296 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6297 });
6298 let change_notification_2 = fake_server
6299 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6300 .await;
6301 assert!(
6302 change_notification_2.text_document.version
6303 > change_notification_1.text_document.version
6304 );
6305
6306 // Handle out-of-order diagnostics
6307 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6308 lsp::PublishDiagnosticsParams {
6309 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6310 version: Some(change_notification_2.text_document.version),
6311 diagnostics: vec![
6312 lsp::Diagnostic {
6313 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6314 severity: Some(DiagnosticSeverity::ERROR),
6315 message: "undefined variable 'BB'".to_string(),
6316 source: Some("disk".to_string()),
6317 ..Default::default()
6318 },
6319 lsp::Diagnostic {
6320 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6321 severity: Some(DiagnosticSeverity::WARNING),
6322 message: "undefined variable 'A'".to_string(),
6323 source: Some("disk".to_string()),
6324 ..Default::default()
6325 },
6326 ],
6327 },
6328 );
6329
6330 buffer.next_notification(cx).await;
6331 buffer.read_with(cx, |buffer, _| {
6332 assert_eq!(
6333 buffer
6334 .snapshot()
6335 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6336 .collect::<Vec<_>>(),
6337 &[
6338 DiagnosticEntry {
6339 range: Point::new(2, 21)..Point::new(2, 22),
6340 diagnostic: Diagnostic {
6341 severity: DiagnosticSeverity::WARNING,
6342 message: "undefined variable 'A'".to_string(),
6343 is_disk_based: true,
6344 group_id: 1,
6345 is_primary: true,
6346 ..Default::default()
6347 }
6348 },
6349 DiagnosticEntry {
6350 range: Point::new(3, 9)..Point::new(3, 14),
6351 diagnostic: Diagnostic {
6352 severity: DiagnosticSeverity::ERROR,
6353 message: "undefined variable 'BB'".to_string(),
6354 is_disk_based: true,
6355 group_id: 0,
6356 is_primary: true,
6357 ..Default::default()
6358 },
6359 }
6360 ]
6361 );
6362 });
6363 }
6364
6365 #[gpui::test]
6366 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6367 cx.foreground().forbid_parking();
6368
6369 let text = concat!(
6370 "let one = ;\n", //
6371 "let two = \n",
6372 "let three = 3;\n",
6373 );
6374
6375 let fs = FakeFs::new(cx.background());
6376 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6377
6378 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6379 let buffer = project
6380 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6381 .await
6382 .unwrap();
6383
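        // Publish two diagnostics with empty ranges: one in the middle of a line and one at
        // the end of a line.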
6384 project.update(cx, |project, cx| {
6385 project
6386 .update_buffer_diagnostics(
6387 &buffer,
6388 vec![
6389 DiagnosticEntry {
6390 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6391 diagnostic: Diagnostic {
6392 severity: DiagnosticSeverity::ERROR,
6393 message: "syntax error 1".to_string(),
6394 ..Default::default()
6395 },
6396 },
6397 DiagnosticEntry {
6398 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6399 diagnostic: Diagnostic {
6400 severity: DiagnosticSeverity::ERROR,
6401 message: "syntax error 2".to_string(),
6402 ..Default::default()
6403 },
6404 },
6405 ],
6406 None,
6407 cx,
6408 )
6409 .unwrap();
6410 });
6411
6412 // An empty range is extended forward to include the following character.
6413 // At the end of a line, an empty range is extended backward to include
6414 // the preceding character.
6415 buffer.read_with(cx, |buffer, _| {
6416 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6417 assert_eq!(
6418 chunks
6419 .iter()
6420 .map(|(s, d)| (s.as_str(), *d))
6421 .collect::<Vec<_>>(),
6422 &[
6423 ("let one = ", None),
6424 (";", Some(DiagnosticSeverity::ERROR)),
6425 ("\nlet two =", None),
6426 (" ", Some(DiagnosticSeverity::ERROR)),
6427 ("\nlet three = 3;\n", None)
6428 ]
6429 );
6430 });
6431 }
6432
6433 #[gpui::test]
6434 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6435 cx.foreground().forbid_parking();
6436
6437 let mut language = Language::new(
6438 LanguageConfig {
6439 name: "Rust".into(),
6440 path_suffixes: vec!["rs".to_string()],
6441 ..Default::default()
6442 },
6443 Some(tree_sitter_rust::language()),
6444 );
6445 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6446
6447 let text = "
6448 fn a() {
6449 f1();
6450 }
6451 fn b() {
6452 f2();
6453 }
6454 fn c() {
6455 f3();
6456 }
6457 "
6458 .unindent();
6459
6460 let fs = FakeFs::new(cx.background());
6461 fs.insert_tree(
6462 "/dir",
6463 json!({
6464 "a.rs": text.clone(),
6465 }),
6466 )
6467 .await;
6468
6469 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6470 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6471 let buffer = project
6472 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6473 .await
6474 .unwrap();
6475
6476 let mut fake_server = fake_servers.next().await.unwrap();
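        // Capture the document version the server saw when the buffer was opened; the edits
        // below are expressed against this version.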
6477 let lsp_document_version = fake_server
6478 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6479 .await
6480 .text_document
6481 .version;
6482
6483 // Simulate editing the buffer after the language server computes some edits.
6484 buffer.update(cx, |buffer, cx| {
6485 buffer.edit(
6486 [(
6487 Point::new(0, 0)..Point::new(0, 0),
6488 "// above first function\n",
6489 )],
6490 cx,
6491 );
6492 buffer.edit(
6493 [(
6494 Point::new(2, 0)..Point::new(2, 0),
6495 " // inside first function\n",
6496 )],
6497 cx,
6498 );
6499 buffer.edit(
6500 [(
6501 Point::new(6, 4)..Point::new(6, 4),
6502 "// inside second function ",
6503 )],
6504 cx,
6505 );
6506
6507 assert_eq!(
6508 buffer.text(),
6509 "
6510 // above first function
6511 fn a() {
6512 // inside first function
6513 f1();
6514 }
6515 fn b() {
6516 // inside second function f2();
6517 }
6518 fn c() {
6519 f3();
6520 }
6521 "
6522 .unindent()
6523 );
6524 });
6525
6526 let edits = project
6527 .update(cx, |project, cx| {
6528 project.edits_from_lsp(
6529 &buffer,
6530 vec![
6531 // replace body of first function
6532 lsp::TextEdit {
6533 range: lsp::Range::new(
6534 lsp::Position::new(0, 0),
6535 lsp::Position::new(3, 0),
6536 ),
6537 new_text: "
6538 fn a() {
6539 f10();
6540 }
6541 "
6542 .unindent(),
6543 },
6544 // edit inside second function
6545 lsp::TextEdit {
6546 range: lsp::Range::new(
6547 lsp::Position::new(4, 6),
6548 lsp::Position::new(4, 6),
6549 ),
6550 new_text: "00".into(),
6551 },
6552 // edit inside third function via two distinct edits
6553 lsp::TextEdit {
6554 range: lsp::Range::new(
6555 lsp::Position::new(7, 5),
6556 lsp::Position::new(7, 5),
6557 ),
6558 new_text: "4000".into(),
6559 },
6560 lsp::TextEdit {
6561 range: lsp::Range::new(
6562 lsp::Position::new(7, 5),
6563 lsp::Position::new(7, 6),
6564 ),
6565 new_text: "".into(),
6566 },
6567 ],
6568 Some(lsp_document_version),
6569 cx,
6570 )
6571 })
6572 .await
6573 .unwrap();
6574
6575 buffer.update(cx, |buffer, cx| {
6576 for (range, new_text) in edits {
6577 buffer.edit([(range, new_text)], cx);
6578 }
6579 assert_eq!(
6580 buffer.text(),
6581 "
6582 // above first function
6583 fn a() {
6584 // inside first function
6585 f10();
6586 }
6587 fn b() {
6588 // inside second function f200();
6589 }
6590 fn c() {
6591 f4000();
6592 }
6593 "
6594 .unindent()
6595 );
6596 });
6597 }
6598
6599 #[gpui::test]
6600 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6601 cx.foreground().forbid_parking();
6602
6603 let text = "
6604 use a::b;
6605 use a::c;
6606
6607 fn f() {
6608 b();
6609 c();
6610 }
6611 "
6612 .unindent();
6613
6614 let fs = FakeFs::new(cx.background());
6615 fs.insert_tree(
6616 "/dir",
6617 json!({
6618 "a.rs": text.clone(),
6619 }),
6620 )
6621 .await;
6622
6623 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6624 let buffer = project
6625 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6626 .await
6627 .unwrap();
6628
6629 // Simulate the language server sending us a small edit in the form of a very large diff.
6630 // Rust-analyzer does this when performing a merge-imports code action.
6631 let edits = project
6632 .update(cx, |project, cx| {
6633 project.edits_from_lsp(
6634 &buffer,
6635 [
6636 // Replace the first use statement without editing the semicolon.
6637 lsp::TextEdit {
6638 range: lsp::Range::new(
6639 lsp::Position::new(0, 4),
6640 lsp::Position::new(0, 8),
6641 ),
6642 new_text: "a::{b, c}".into(),
6643 },
6644 // Reinsert the remainder of the file between the semicolon and the final
6645 // newline of the file.
6646 lsp::TextEdit {
6647 range: lsp::Range::new(
6648 lsp::Position::new(0, 9),
6649 lsp::Position::new(0, 9),
6650 ),
6651 new_text: "\n\n".into(),
6652 },
6653 lsp::TextEdit {
6654 range: lsp::Range::new(
6655 lsp::Position::new(0, 9),
6656 lsp::Position::new(0, 9),
6657 ),
6658 new_text: "
6659 fn f() {
6660 b();
6661 c();
6662 }"
6663 .unindent(),
6664 },
6665 // Delete everything after the first newline of the file.
6666 lsp::TextEdit {
6667 range: lsp::Range::new(
6668 lsp::Position::new(1, 0),
6669 lsp::Position::new(7, 0),
6670 ),
6671 new_text: "".into(),
6672 },
6673 ],
6674 None,
6675 cx,
6676 )
6677 })
6678 .await
6679 .unwrap();
6680
6681 buffer.update(cx, |buffer, cx| {
6682 let edits = edits
6683 .into_iter()
6684 .map(|(range, text)| {
6685 (
6686 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6687 text,
6688 )
6689 })
6690 .collect::<Vec<_>>();
6691
6692 assert_eq!(
6693 edits,
6694 [
6695 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6696 (Point::new(1, 0)..Point::new(2, 0), "".into())
6697 ]
6698 );
6699
6700 for (range, new_text) in edits {
6701 buffer.edit([(range, new_text)], cx);
6702 }
6703 assert_eq!(
6704 buffer.text(),
6705 "
6706 use a::{b, c};
6707
6708 fn f() {
6709 b();
6710 c();
6711 }
6712 "
6713 .unindent()
6714 );
6715 });
6716 }
6717
6718 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6719 buffer: &Buffer,
6720 range: Range<T>,
6721 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6722 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6723 for chunk in buffer.snapshot().chunks(range, true) {
6724 if chunks.last().map_or(false, |prev_chunk| {
6725 prev_chunk.1 == chunk.diagnostic_severity
6726 }) {
6727 chunks.last_mut().unwrap().0.push_str(chunk.text);
6728 } else {
6729 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6730 }
6731 }
6732 chunks
6733 }
6734
6735 #[gpui::test]
6736 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6737 let dir = temp_tree(json!({
6738 "root": {
6739 "dir1": {},
6740 "dir2": {
6741 "dir3": {}
6742 }
6743 }
6744 }));
6745
6746 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6747 let cancel_flag = Default::default();
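        // Fuzzy path matching should return no results, since the worktree contains only directories.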
6748 let results = project
6749 .read_with(cx, |project, cx| {
6750 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6751 })
6752 .await;
6753
6754 assert!(results.is_empty());
6755 }
6756
6757 #[gpui::test(iterations = 10)]
6758 async fn test_definition(cx: &mut gpui::TestAppContext) {
6759 let mut language = Language::new(
6760 LanguageConfig {
6761 name: "Rust".into(),
6762 path_suffixes: vec!["rs".to_string()],
6763 ..Default::default()
6764 },
6765 Some(tree_sitter_rust::language()),
6766 );
6767 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6768
6769 let fs = FakeFs::new(cx.background());
6770 fs.insert_tree(
6771 "/dir",
6772 json!({
6773 "a.rs": "const fn a() { A }",
6774 "b.rs": "const y: i32 = crate::a()",
6775 }),
6776 )
6777 .await;
6778
6779 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6780 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6781
6782 let buffer = project
6783 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6784 .await
6785 .unwrap();
6786
6787 let fake_server = fake_servers.next().await.unwrap();
6788 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6789 let params = params.text_document_position_params;
6790 assert_eq!(
6791 params.text_document.uri.to_file_path().unwrap(),
6792 Path::new("/dir/b.rs"),
6793 );
6794 assert_eq!(params.position, lsp::Position::new(0, 22));
6795
6796 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6797 lsp::Location::new(
6798 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6799 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6800 ),
6801 )))
6802 });
6803
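        // Request the definition of the symbol at offset 22, which falls on the `a` in `crate::a()`.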
6804 let mut definitions = project
6805 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6806 .await
6807 .unwrap();
6808
6809 assert_eq!(definitions.len(), 1);
6810 let definition = definitions.pop().unwrap();
6811 cx.update(|cx| {
6812 let target_buffer = definition.buffer.read(cx);
6813 assert_eq!(
6814 target_buffer
6815 .file()
6816 .unwrap()
6817 .as_local()
6818 .unwrap()
6819 .abs_path(cx),
6820 Path::new("/dir/a.rs"),
6821 );
6822 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6823 assert_eq!(
6824 list_worktrees(&project, cx),
6825 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6826 );
6827
6828 drop(definition);
6829 });
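        // Dropping the definition releases the invisible worktree that was created for the target buffer.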
6830 cx.read(|cx| {
6831 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6832 });
6833
6834 fn list_worktrees<'a>(
6835 project: &'a ModelHandle<Project>,
6836 cx: &'a AppContext,
6837 ) -> Vec<(&'a Path, bool)> {
6838 project
6839 .read(cx)
6840 .worktrees(cx)
6841 .map(|worktree| {
6842 let worktree = worktree.read(cx);
6843 (
6844 worktree.as_local().unwrap().abs_path().as_ref(),
6845 worktree.is_visible(),
6846 )
6847 })
6848 .collect::<Vec<_>>()
6849 }
6850 }
6851
6852 #[gpui::test]
6853 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6854 let mut language = Language::new(
6855 LanguageConfig {
6856 name: "TypeScript".into(),
6857 path_suffixes: vec!["ts".to_string()],
6858 ..Default::default()
6859 },
6860 Some(tree_sitter_typescript::language_typescript()),
6861 );
6862 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6863
6864 let fs = FakeFs::new(cx.background());
6865 fs.insert_tree(
6866 "/dir",
6867 json!({
6868 "a.ts": "",
6869 }),
6870 )
6871 .await;
6872
6873 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6874 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6875 let buffer = project
6876 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6877 .await
6878 .unwrap();
6879
6880 let fake_server = fake_language_servers.next().await.unwrap();
6881
6882 let text = "let a = b.fqn";
6883 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
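        // Request completions at the end of the partially typed property access.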
6884 let completions = project.update(cx, |project, cx| {
6885 project.completions(&buffer, text.len(), cx)
6886 });
6887
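        // The server returns a completion item without an edit range, so the editor must infer
        // the range to replace from the word behind the cursor.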
6888 fake_server
6889 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6890 Ok(Some(lsp::CompletionResponse::Array(vec![
6891 lsp::CompletionItem {
6892 label: "fullyQualifiedName?".into(),
6893 insert_text: Some("fullyQualifiedName".into()),
6894 ..Default::default()
6895 },
6896 ])))
6897 })
6898 .next()
6899 .await;
6900 let completions = completions.await.unwrap();
6901 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6902 assert_eq!(completions.len(), 1);
6903 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6904 assert_eq!(
6905 completions[0].old_range.to_offset(&snapshot),
6906 text.len() - 3..text.len()
6907 );
6908 }
6909
6910 #[gpui::test(iterations = 10)]
6911 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6912 let mut language = Language::new(
6913 LanguageConfig {
6914 name: "TypeScript".into(),
6915 path_suffixes: vec!["ts".to_string()],
6916 ..Default::default()
6917 },
6918 None,
6919 );
6920 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6921
6922 let fs = FakeFs::new(cx.background());
6923 fs.insert_tree(
6924 "/dir",
6925 json!({
6926 "a.ts": "a",
6927 }),
6928 )
6929 .await;
6930
6931 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6932 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6933 let buffer = project
6934 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6935 .await
6936 .unwrap();
6937
6938 let fake_server = fake_language_servers.next().await.unwrap();
6939
6940        // The language server returns code actions that contain commands rather than edits.
6941 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6942 fake_server
6943 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6944 Ok(Some(vec![
6945 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6946 title: "The code action".into(),
6947 command: Some(lsp::Command {
6948 title: "The command".into(),
6949 command: "_the/command".into(),
6950 arguments: Some(vec![json!("the-argument")]),
6951 }),
6952 ..Default::default()
6953 }),
6954 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6955 title: "two".into(),
6956 ..Default::default()
6957 }),
6958 ]))
6959 })
6960 .next()
6961 .await;
6962
6963 let action = actions.await.unwrap()[0].clone();
6964 let apply = project.update(cx, |project, cx| {
6965 project.apply_code_action(buffer.clone(), action, true, cx)
6966 });
6967
6968        // Resolving the code action does not populate its edits. In the absence of
6969        // edits, we must execute the given command.
6970 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6971 |action, _| async move { Ok(action) },
6972 );
6973
6974        // While executing the command, the language server sends the editor
6975        // a `workspace/applyEdit` request.
6976 fake_server
6977 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6978 let fake = fake_server.clone();
6979 move |params, _| {
6980 assert_eq!(params.command, "_the/command");
6981 let fake = fake.clone();
6982 async move {
6983 fake.server
6984 .request::<lsp::request::ApplyWorkspaceEdit>(
6985 lsp::ApplyWorkspaceEditParams {
6986 label: None,
6987 edit: lsp::WorkspaceEdit {
6988 changes: Some(
6989 [(
6990 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6991 vec![lsp::TextEdit {
6992 range: lsp::Range::new(
6993 lsp::Position::new(0, 0),
6994 lsp::Position::new(0, 0),
6995 ),
6996 new_text: "X".into(),
6997 }],
6998 )]
6999 .into_iter()
7000 .collect(),
7001 ),
7002 ..Default::default()
7003 },
7004 },
7005 )
7006 .await
7007 .unwrap();
7008 Ok(Some(json!(null)))
7009 }
7010 }
7011 })
7012 .next()
7013 .await;
7014
7015        // Applying the code action returns a project transaction containing the edits
7016        // sent by the language server in its `workspace/applyEdit` request.
7017 let transaction = apply.await.unwrap();
7018 assert!(transaction.0.contains_key(&buffer));
7019 buffer.update(cx, |buffer, cx| {
7020 assert_eq!(buffer.text(), "Xa");
7021 buffer.undo(cx);
7022 assert_eq!(buffer.text(), "a");
7023 });
7024 }
7025
7026 #[gpui::test]
7027 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7028 let fs = FakeFs::new(cx.background());
7029 fs.insert_tree(
7030 "/dir",
7031 json!({
7032 "file1": "the old contents",
7033 }),
7034 )
7035 .await;
7036
7037 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7038 let buffer = project
7039 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7040 .await
7041 .unwrap();
7042 buffer
7043 .update(cx, |buffer, cx| {
7044 assert_eq!(buffer.text(), "the old contents");
7045 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7046 buffer.save(cx)
7047 })
7048 .await
7049 .unwrap();
7050
7051 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7052 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7053 }
7054
7055 #[gpui::test]
7056 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7057 let fs = FakeFs::new(cx.background());
7058 fs.insert_tree(
7059 "/dir",
7060 json!({
7061 "file1": "the old contents",
7062 }),
7063 )
7064 .await;
7065
7066 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7067 let buffer = project
7068 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7069 .await
7070 .unwrap();
7071 buffer
7072 .update(cx, |buffer, cx| {
7073 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7074 buffer.save(cx)
7075 })
7076 .await
7077 .unwrap();
7078
7079 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7080 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7081 }
7082
7083 #[gpui::test]
7084 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7085 let fs = FakeFs::new(cx.background());
7086 fs.insert_tree("/dir", json!({})).await;
7087
7088 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
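        // Create an untitled buffer that isn't yet backed by a file on disk.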
7089 let buffer = project.update(cx, |project, cx| {
7090 project.create_buffer("", None, cx).unwrap()
7091 });
7092 buffer.update(cx, |buffer, cx| {
7093 buffer.edit([(0..0, "abc")], cx);
7094 assert!(buffer.is_dirty());
7095 assert!(!buffer.has_conflict());
7096 });
7097 project
7098 .update(cx, |project, cx| {
7099 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7100 })
7101 .await
7102 .unwrap();
7103 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7104 buffer.read_with(cx, |buffer, cx| {
7105 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7106 assert!(!buffer.is_dirty());
7107 assert!(!buffer.has_conflict());
7108 });
7109
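        // Opening the path that the buffer was saved to returns the same buffer.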
7110 let opened_buffer = project
7111 .update(cx, |project, cx| {
7112 project.open_local_buffer("/dir/file1", cx)
7113 })
7114 .await
7115 .unwrap();
7116 assert_eq!(opened_buffer, buffer);
7117 }
7118
7119 #[gpui::test(retries = 5)]
7120 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7121 let dir = temp_tree(json!({
7122 "a": {
7123 "file1": "",
7124 "file2": "",
7125 "file3": "",
7126 },
7127 "b": {
7128 "c": {
7129 "file4": "",
7130 "file5": "",
7131 }
7132 }
7133 }));
7134
7135 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7136 let rpc = project.read_with(cx, |p, _| p.client.clone());
7137
7138 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7139 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7140 async move { buffer.await.unwrap() }
7141 };
7142 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7143 project.read_with(cx, |project, cx| {
7144 let tree = project.worktrees(cx).next().unwrap();
7145 tree.read(cx)
7146 .entry_for_path(path)
7147 .expect(&format!("no entry for path {}", path))
7148 .id
7149 })
7150 };
7151
7152 let buffer2 = buffer_for_path("a/file2", cx).await;
7153 let buffer3 = buffer_for_path("a/file3", cx).await;
7154 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7155 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7156
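        // Record the worktree entry ids so we can check that they are preserved across renames.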
7157 let file2_id = id_for_path("a/file2", &cx);
7158 let file3_id = id_for_path("a/file3", &cx);
7159 let file4_id = id_for_path("b/c/file4", &cx);
7160
7161 // Create a remote copy of this worktree.
7162 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7163 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7164 let (remote, load_task) = cx.update(|cx| {
7165 Worktree::remote(
7166 1,
7167 1,
7168 initial_snapshot.to_proto(&Default::default(), true),
7169 rpc.clone(),
7170 cx,
7171 )
7172 });
7174 load_task.await;
7175
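        // None of the buffers have been edited, so they all start out clean.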
7176 cx.read(|cx| {
7177 assert!(!buffer2.read(cx).is_dirty());
7178 assert!(!buffer3.read(cx).is_dirty());
7179 assert!(!buffer4.read(cx).is_dirty());
7180 assert!(!buffer5.read(cx).is_dirty());
7181 });
7182
7183 // Rename and delete files and directories.
7184 tree.flush_fs_events(&cx).await;
7185 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7186 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7187 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7188 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7189 tree.flush_fs_events(&cx).await;
7190
7191 let expected_paths = vec![
7192 "a",
7193 "a/file1",
7194 "a/file2.new",
7195 "b",
7196 "d",
7197 "d/file3",
7198 "d/file4",
7199 ];
7200
7201 cx.read(|app| {
7202 assert_eq!(
7203 tree.read(app)
7204 .paths()
7205 .map(|p| p.to_str().unwrap())
7206 .collect::<Vec<_>>(),
7207 expected_paths
7208 );
7209
7210 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7211 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7212 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7213
7214 assert_eq!(
7215 buffer2.read(app).file().unwrap().path().as_ref(),
7216 Path::new("a/file2.new")
7217 );
7218 assert_eq!(
7219 buffer3.read(app).file().unwrap().path().as_ref(),
7220 Path::new("d/file3")
7221 );
7222 assert_eq!(
7223 buffer4.read(app).file().unwrap().path().as_ref(),
7224 Path::new("d/file4")
7225 );
7226 assert_eq!(
7227 buffer5.read(app).file().unwrap().path().as_ref(),
7228 Path::new("b/c/file5")
7229 );
7230
7231 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7232 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7233 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7234 assert!(buffer5.read(app).file().unwrap().is_deleted());
7235 });
7236
7237 // Update the remote worktree. Check that it becomes consistent with the
7238 // local worktree.
7239 remote.update(cx, |remote, cx| {
7240 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7241 &initial_snapshot,
7242 1,
7243 1,
7244 true,
7245 );
7246 remote
7247 .as_remote_mut()
7248 .unwrap()
7249 .snapshot
7250 .apply_remote_update(update_message)
7251 .unwrap();
7252
7253 assert_eq!(
7254 remote
7255 .paths()
7256 .map(|p| p.to_str().unwrap())
7257 .collect::<Vec<_>>(),
7258 expected_paths
7259 );
7260 });
7261 }
7262
7263 #[gpui::test]
7264 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7265 let fs = FakeFs::new(cx.background());
7266 fs.insert_tree(
7267 "/dir",
7268 json!({
7269 "a.txt": "a-contents",
7270 "b.txt": "b-contents",
7271 }),
7272 )
7273 .await;
7274
7275 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7276
7277 // Spawn multiple tasks to open paths, repeating some paths.
7278 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7279 (
7280 p.open_local_buffer("/dir/a.txt", cx),
7281 p.open_local_buffer("/dir/b.txt", cx),
7282 p.open_local_buffer("/dir/a.txt", cx),
7283 )
7284 });
7285
7286 let buffer_a_1 = buffer_a_1.await.unwrap();
7287 let buffer_a_2 = buffer_a_2.await.unwrap();
7288 let buffer_b = buffer_b.await.unwrap();
7289 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7290 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7291
7292 // There is only one buffer per path.
7293 let buffer_a_id = buffer_a_1.id();
7294 assert_eq!(buffer_a_2.id(), buffer_a_id);
7295
7296 // Open the same path again while it is still open.
7297 drop(buffer_a_1);
7298 let buffer_a_3 = project
7299 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7300 .await
7301 .unwrap();
7302
7303 // There's still only one buffer per path.
7304 assert_eq!(buffer_a_3.id(), buffer_a_id);
7305 }
7306
7307 #[gpui::test]
7308 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7309 let fs = FakeFs::new(cx.background());
7310 fs.insert_tree(
7311 "/dir",
7312 json!({
7313 "file1": "abc",
7314 "file2": "def",
7315 "file3": "ghi",
7316 }),
7317 )
7318 .await;
7319
7320 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7321
7322 let buffer1 = project
7323 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7324 .await
7325 .unwrap();
7326 let events = Rc::new(RefCell::new(Vec::new()));
7327
7328 // initially, the buffer isn't dirty.
7329 buffer1.update(cx, |buffer, cx| {
7330 cx.subscribe(&buffer1, {
7331 let events = events.clone();
7332 move |_, _, event, _| match event {
7333 BufferEvent::Operation(_) => {}
7334 _ => events.borrow_mut().push(event.clone()),
7335 }
7336 })
7337 .detach();
7338
7339 assert!(!buffer.is_dirty());
7340 assert!(events.borrow().is_empty());
7341
7342 buffer.edit([(1..2, "")], cx);
7343 });
7344
7345 // after the first edit, the buffer is dirty, and emits a dirtied event.
7346 buffer1.update(cx, |buffer, cx| {
7347 assert!(buffer.text() == "ac");
7348 assert!(buffer.is_dirty());
7349 assert_eq!(
7350 *events.borrow(),
7351 &[language::Event::Edited, language::Event::Dirtied]
7352 );
7353 events.borrow_mut().clear();
7354 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7355 });
7356
7357 // after saving, the buffer is not dirty, and emits a saved event.
7358 buffer1.update(cx, |buffer, cx| {
7359 assert!(!buffer.is_dirty());
7360 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7361 events.borrow_mut().clear();
7362
7363 buffer.edit([(1..1, "B")], cx);
7364 buffer.edit([(2..2, "D")], cx);
7365 });
7366
7367 // after editing again, the buffer is dirty, and emits another dirty event.
7368 buffer1.update(cx, |buffer, cx| {
7369 assert!(buffer.text() == "aBDc");
7370 assert!(buffer.is_dirty());
7371 assert_eq!(
7372 *events.borrow(),
7373 &[
7374 language::Event::Edited,
7375 language::Event::Dirtied,
7376 language::Event::Edited,
7377 ],
7378 );
7379 events.borrow_mut().clear();
7380
7381            // TODO - currently, after restoring the buffer to its
7382            // previously-saved state, the buffer is still considered dirty.
7383 buffer.edit([(1..3, "")], cx);
7384 assert!(buffer.text() == "ac");
7385 assert!(buffer.is_dirty());
7386 });
7387
7388 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7389
7390 // When a file is deleted, the buffer is considered dirty.
7391 let events = Rc::new(RefCell::new(Vec::new()));
7392 let buffer2 = project
7393 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7394 .await
7395 .unwrap();
7396 buffer2.update(cx, |_, cx| {
7397 cx.subscribe(&buffer2, {
7398 let events = events.clone();
7399 move |_, _, event, _| events.borrow_mut().push(event.clone())
7400 })
7401 .detach();
7402 });
7403
7404 fs.remove_file("/dir/file2".as_ref(), Default::default())
7405 .await
7406 .unwrap();
7407 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7408 assert_eq!(
7409 *events.borrow(),
7410 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7411 );
7412
7413 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7414 let events = Rc::new(RefCell::new(Vec::new()));
7415 let buffer3 = project
7416 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7417 .await
7418 .unwrap();
7419 buffer3.update(cx, |_, cx| {
7420 cx.subscribe(&buffer3, {
7421 let events = events.clone();
7422 move |_, _, event, _| events.borrow_mut().push(event.clone())
7423 })
7424 .detach();
7425 });
7426
7427 buffer3.update(cx, |buffer, cx| {
7428 buffer.edit([(0..0, "x")], cx);
7429 });
7430 events.borrow_mut().clear();
7431 fs.remove_file("/dir/file3".as_ref(), Default::default())
7432 .await
7433 .unwrap();
7434 buffer3
7435 .condition(&cx, |_, _| !events.borrow().is_empty())
7436 .await;
7437 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7438 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7439 }
7440
7441 #[gpui::test]
7442 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7443 let initial_contents = "aaa\nbbbbb\nc\n";
7444 let fs = FakeFs::new(cx.background());
7445 fs.insert_tree(
7446 "/dir",
7447 json!({
7448 "the-file": initial_contents,
7449 }),
7450 )
7451 .await;
7452 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7453 let buffer = project
7454 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7455 .await
7456 .unwrap();
7457
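        // Create an anchor on each of the first three lines so we can verify how they move
        // when the file is reloaded from disk.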
7458 let anchors = (0..3)
7459 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7460 .collect::<Vec<_>>();
7461
7462 // Change the file on disk, adding two new lines of text, and removing
7463 // one line.
7464 buffer.read_with(cx, |buffer, _| {
7465 assert!(!buffer.is_dirty());
7466 assert!(!buffer.has_conflict());
7467 });
7468 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7469 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7470 .await
7471 .unwrap();
7472
7473 // Because the buffer was not modified, it is reloaded from disk. Its
7474 // contents are edited according to the diff between the old and new
7475 // file contents.
7476 buffer
7477 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7478 .await;
7479
7480 buffer.update(cx, |buffer, _| {
7481 assert_eq!(buffer.text(), new_contents);
7482 assert!(!buffer.is_dirty());
7483 assert!(!buffer.has_conflict());
7484
7485 let anchor_positions = anchors
7486 .iter()
7487 .map(|anchor| anchor.to_point(&*buffer))
7488 .collect::<Vec<_>>();
7489 assert_eq!(
7490 anchor_positions,
7491 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7492 );
7493 });
7494
7495 // Modify the buffer
7496 buffer.update(cx, |buffer, cx| {
7497 buffer.edit([(0..0, " ")], cx);
7498 assert!(buffer.is_dirty());
7499 assert!(!buffer.has_conflict());
7500 });
7501
7502 // Change the file on disk again, adding blank lines to the beginning.
7503 fs.save(
7504 "/dir/the-file".as_ref(),
7505 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7506 )
7507 .await
7508 .unwrap();
7509
7510 // Because the buffer is modified, it doesn't reload from disk, but is
7511 // marked as having a conflict.
7512 buffer
7513 .condition(&cx, |buffer, _| buffer.has_conflict())
7514 .await;
7515 }
7516
7517 #[gpui::test]
7518 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7519 cx.foreground().forbid_parking();
7520
7521 let fs = FakeFs::new(cx.background());
7522 fs.insert_tree(
7523 "/the-dir",
7524 json!({
7525 "a.rs": "
7526 fn foo(mut v: Vec<usize>) {
7527 for x in &v {
7528 v.push(1);
7529 }
7530 }
7531 "
7532 .unindent(),
7533 }),
7534 )
7535 .await;
7536
7537 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7538 let buffer = project
7539 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7540 .await
7541 .unwrap();
7542
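        // Build a publishDiagnostics message in which hints appear both as standalone HINT
        // diagnostics and as related information on their primary diagnostics, so that they
        // can be grouped together.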
7543 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7544 let message = lsp::PublishDiagnosticsParams {
7545 uri: buffer_uri.clone(),
7546 diagnostics: vec![
7547 lsp::Diagnostic {
7548 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7549 severity: Some(DiagnosticSeverity::WARNING),
7550 message: "error 1".to_string(),
7551 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7552 location: lsp::Location {
7553 uri: buffer_uri.clone(),
7554 range: lsp::Range::new(
7555 lsp::Position::new(1, 8),
7556 lsp::Position::new(1, 9),
7557 ),
7558 },
7559 message: "error 1 hint 1".to_string(),
7560 }]),
7561 ..Default::default()
7562 },
7563 lsp::Diagnostic {
7564 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7565 severity: Some(DiagnosticSeverity::HINT),
7566 message: "error 1 hint 1".to_string(),
7567 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7568 location: lsp::Location {
7569 uri: buffer_uri.clone(),
7570 range: lsp::Range::new(
7571 lsp::Position::new(1, 8),
7572 lsp::Position::new(1, 9),
7573 ),
7574 },
7575 message: "original diagnostic".to_string(),
7576 }]),
7577 ..Default::default()
7578 },
7579 lsp::Diagnostic {
7580 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7581 severity: Some(DiagnosticSeverity::ERROR),
7582 message: "error 2".to_string(),
7583 related_information: Some(vec![
7584 lsp::DiagnosticRelatedInformation {
7585 location: lsp::Location {
7586 uri: buffer_uri.clone(),
7587 range: lsp::Range::new(
7588 lsp::Position::new(1, 13),
7589 lsp::Position::new(1, 15),
7590 ),
7591 },
7592 message: "error 2 hint 1".to_string(),
7593 },
7594 lsp::DiagnosticRelatedInformation {
7595 location: lsp::Location {
7596 uri: buffer_uri.clone(),
7597 range: lsp::Range::new(
7598 lsp::Position::new(1, 13),
7599 lsp::Position::new(1, 15),
7600 ),
7601 },
7602 message: "error 2 hint 2".to_string(),
7603 },
7604 ]),
7605 ..Default::default()
7606 },
7607 lsp::Diagnostic {
7608 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7609 severity: Some(DiagnosticSeverity::HINT),
7610 message: "error 2 hint 1".to_string(),
7611 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7612 location: lsp::Location {
7613 uri: buffer_uri.clone(),
7614 range: lsp::Range::new(
7615 lsp::Position::new(2, 8),
7616 lsp::Position::new(2, 17),
7617 ),
7618 },
7619 message: "original diagnostic".to_string(),
7620 }]),
7621 ..Default::default()
7622 },
7623 lsp::Diagnostic {
7624 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7625 severity: Some(DiagnosticSeverity::HINT),
7626 message: "error 2 hint 2".to_string(),
7627 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7628 location: lsp::Location {
7629 uri: buffer_uri.clone(),
7630 range: lsp::Range::new(
7631 lsp::Position::new(2, 8),
7632 lsp::Position::new(2, 17),
7633 ),
7634 },
7635 message: "original diagnostic".to_string(),
7636 }]),
7637 ..Default::default()
7638 },
7639 ],
7640 version: None,
7641 };
7642
7643 project
7644 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7645 .unwrap();
7646 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7647
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

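        // Group 0 consists of the warning and the hint that refers back to it.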
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
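        // Group 1 consists of the error and both of its hints.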
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

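        // Register a Rust language backed by a fake language server that advertises
        // rename support, including `prepareRename`.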
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

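        // `one.rs` defines the constant and `two.rs` references it twice, so the
        // rename should produce edits in both files.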
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

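        // Prepare the rename at the cursor position; the fake server responds with
        // the range of the symbol that can be renamed.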
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

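        // Perform the rename. The fake server replies with a workspace edit that
        // touches both files.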
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
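        // The resulting project transaction should contain both edited buffers.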
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
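        // A project-wide search should find matches in files that haven't been opened yet.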
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

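        // Open one of the files and modify it in memory without saving.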
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

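        // The search results should reflect the unsaved, in-memory edits.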
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

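        // Runs a project-wide search and returns the match ranges keyed by each
        // buffer's file path.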
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}