1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeAdded,
143 WorktreeRemoved(WorktreeId),
144 DiskBasedDiagnosticsStarted,
145 DiskBasedDiagnosticsUpdated,
146 DiskBasedDiagnosticsFinished,
147 DiagnosticsUpdated(ProjectPath),
148 RemoteIdChanged(Option<u64>),
149 CollaboratorLeft(PeerId),
150 ContactRequestedJoin(Arc<User>),
151 ContactCancelledJoinRequest(Arc<User>),
152}
153
154#[derive(Serialize)]
155pub struct LanguageServerStatus {
156 pub name: String,
157 pub pending_work: BTreeMap<String, LanguageServerProgress>,
158 pub pending_diagnostic_updates: isize,
159}
160
161#[derive(Clone, Debug, Serialize)]
162pub struct LanguageServerProgress {
163 pub message: Option<String>,
164 pub percentage: Option<usize>,
165 #[serde(skip_serializing)]
166 pub last_update_at: Instant,
167}
168
169#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
170pub struct ProjectPath {
171 pub worktree_id: WorktreeId,
172 pub path: Arc<Path>,
173}
174
175#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
176pub struct DiagnosticSummary {
177 pub error_count: usize,
178 pub warning_count: usize,
179}
180
181#[derive(Debug)]
182pub struct Location {
183 pub buffer: ModelHandle<Buffer>,
184 pub range: Range<language::Anchor>,
185}
186
187#[derive(Debug)]
188pub struct DocumentHighlight {
189 pub range: Range<language::Anchor>,
190 pub kind: DocumentHighlightKind,
191}
192
193#[derive(Clone, Debug)]
194pub struct Symbol {
195 pub source_worktree_id: WorktreeId,
196 pub worktree_id: WorktreeId,
197 pub language_server_name: LanguageServerName,
198 pub path: PathBuf,
199 pub label: CodeLabel,
200 pub name: String,
201 pub kind: lsp::SymbolKind,
202 pub range: Range<PointUtf16>,
203 pub signature: [u8; 32],
204}
205
206#[derive(Default)]
207pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
208
209impl DiagnosticSummary {
210 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
211 let mut this = Self {
212 error_count: 0,
213 warning_count: 0,
214 };
215
216 for entry in diagnostics {
217 if entry.diagnostic.is_primary {
218 match entry.diagnostic.severity {
219 DiagnosticSeverity::ERROR => this.error_count += 1,
220 DiagnosticSeverity::WARNING => this.warning_count += 1,
221 _ => {}
222 }
223 }
224 }
225
226 this
227 }
228
229 pub fn is_empty(&self) -> bool {
230 self.error_count == 0 && self.warning_count == 0
231 }
232
233 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
234 proto::DiagnosticSummary {
235 path: path.to_string_lossy().to_string(),
236 error_count: self.error_count as u32,
237 warning_count: self.warning_count as u32,
238 }
239 }
240}
241
242#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
243pub struct ProjectEntryId(usize);
244
245impl ProjectEntryId {
246 pub const MAX: Self = Self(usize::MAX);
247
248 pub fn new(counter: &AtomicUsize) -> Self {
249 Self(counter.fetch_add(1, SeqCst))
250 }
251
252 pub fn from_proto(id: u64) -> Self {
253 Self(id as usize)
254 }
255
256 pub fn to_proto(&self) -> u64 {
257 self.0 as u64
258 }
259
260 pub fn to_usize(&self) -> usize {
261 self.0
262 }
263}
264
265impl Project {
266 pub fn init(client: &Arc<Client>) {
267 client.add_model_message_handler(Self::handle_request_join_project);
268 client.add_model_message_handler(Self::handle_add_collaborator);
269 client.add_model_message_handler(Self::handle_buffer_reloaded);
270 client.add_model_message_handler(Self::handle_buffer_saved);
271 client.add_model_message_handler(Self::handle_start_language_server);
272 client.add_model_message_handler(Self::handle_update_language_server);
273 client.add_model_message_handler(Self::handle_remove_collaborator);
274 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
275 client.add_model_message_handler(Self::handle_register_worktree);
276 client.add_model_message_handler(Self::handle_unregister_worktree);
277 client.add_model_message_handler(Self::handle_unregister_project);
278 client.add_model_message_handler(Self::handle_project_unshared);
279 client.add_model_message_handler(Self::handle_update_buffer_file);
280 client.add_model_message_handler(Self::handle_update_buffer);
281 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
282 client.add_model_message_handler(Self::handle_update_worktree);
283 client.add_model_request_handler(Self::handle_create_project_entry);
284 client.add_model_request_handler(Self::handle_rename_project_entry);
285 client.add_model_request_handler(Self::handle_delete_project_entry);
286 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
287 client.add_model_request_handler(Self::handle_apply_code_action);
288 client.add_model_request_handler(Self::handle_reload_buffers);
289 client.add_model_request_handler(Self::handle_format_buffers);
290 client.add_model_request_handler(Self::handle_get_code_actions);
291 client.add_model_request_handler(Self::handle_get_completions);
292 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
294 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
295 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
296 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
297 client.add_model_request_handler(Self::handle_search_project);
298 client.add_model_request_handler(Self::handle_get_project_symbols);
299 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
300 client.add_model_request_handler(Self::handle_open_buffer_by_id);
301 client.add_model_request_handler(Self::handle_open_buffer_by_path);
302 client.add_model_request_handler(Self::handle_save_buffer);
303 }
304
305 pub fn local(
306 client: Arc<Client>,
307 user_store: ModelHandle<UserStore>,
308 languages: Arc<LanguageRegistry>,
309 fs: Arc<dyn Fs>,
310 cx: &mut MutableAppContext,
311 ) -> ModelHandle<Self> {
312 cx.add_model(|cx: &mut ModelContext<Self>| {
313 let (remote_id_tx, remote_id_rx) = watch::channel();
314 let _maintain_remote_id_task = cx.spawn_weak({
315 let rpc = client.clone();
316 move |this, mut cx| {
317 async move {
318 let mut status = rpc.status();
319 while let Some(status) = status.next().await {
320 if let Some(this) = this.upgrade(&cx) {
321 if status.is_connected() {
322 this.update(&mut cx, |this, cx| this.register(cx)).await?;
323 } else {
324 this.update(&mut cx, |this, cx| this.unregister(cx));
325 }
326 }
327 }
328 Ok(())
329 }
330 .log_err()
331 }
332 });
333
334 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
335 Self {
336 worktrees: Default::default(),
337 collaborators: Default::default(),
338 opened_buffers: Default::default(),
339 shared_buffers: Default::default(),
340 loading_buffers: Default::default(),
341 loading_local_worktrees: Default::default(),
342 buffer_snapshots: Default::default(),
343 client_state: ProjectClientState::Local {
344 is_shared: false,
345 remote_id_tx,
346 remote_id_rx,
347 _maintain_remote_id_task,
348 },
349 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
350 subscriptions: Vec::new(),
351 active_entry: None,
352 languages,
353 client,
354 user_store,
355 fs,
356 next_entry_id: Default::default(),
357 language_servers: Default::default(),
358 started_language_servers: Default::default(),
359 language_server_statuses: Default::default(),
360 last_workspace_edits_by_language_server: Default::default(),
361 language_server_settings: Default::default(),
362 next_language_server_id: 0,
363 nonce: StdRng::from_entropy().gen(),
364 }
365 })
366 }
367
368 pub async fn remote(
369 remote_id: u64,
370 client: Arc<Client>,
371 user_store: ModelHandle<UserStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut AsyncAppContext,
375 ) -> Result<ModelHandle<Self>, JoinProjectError> {
376 client.authenticate_and_connect(true, &cx).await?;
377
378 let response = client
379 .request(proto::JoinProject {
380 project_id: remote_id,
381 })
382 .await?;
383
384 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
385 proto::join_project_response::Variant::Accept(response) => response,
386 proto::join_project_response::Variant::Decline(decline) => {
387 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
388 Some(proto::join_project_response::decline::Reason::Declined) => {
389 Err(JoinProjectError::HostDeclined)?
390 }
391 Some(proto::join_project_response::decline::Reason::Closed) => {
392 Err(JoinProjectError::HostClosedProject)?
393 }
394 Some(proto::join_project_response::decline::Reason::WentOffline) => {
395 Err(JoinProjectError::HostWentOffline)?
396 }
397 None => Err(anyhow!("missing decline reason"))?,
398 }
399 }
400 };
401
402 let replica_id = response.replica_id as ReplicaId;
403
404 let mut worktrees = Vec::new();
405 for worktree in response.worktrees {
406 let (worktree, load_task) = cx
407 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
408 worktrees.push(worktree);
409 load_task.detach();
410 }
411
412 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
413 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
414 let mut this = Self {
415 worktrees: Vec::new(),
416 loading_buffers: Default::default(),
417 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
418 shared_buffers: Default::default(),
419 loading_local_worktrees: Default::default(),
420 active_entry: None,
421 collaborators: Default::default(),
422 languages,
423 user_store: user_store.clone(),
424 fs,
425 next_entry_id: Default::default(),
426 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
427 client: client.clone(),
428 client_state: ProjectClientState::Remote {
429 sharing_has_stopped: false,
430 remote_id,
431 replica_id,
432 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
433 async move {
434 let mut status = client.status();
435 let is_connected =
436 status.next().await.map_or(false, |s| s.is_connected());
437 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
438 if !is_connected || status.next().await.is_some() {
439 if let Some(this) = this.upgrade(&cx) {
440 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
441 }
442 }
443 Ok(())
444 }
445 .log_err()
446 }),
447 },
448 language_servers: Default::default(),
449 started_language_servers: Default::default(),
450 language_server_settings: Default::default(),
451 language_server_statuses: response
452 .language_servers
453 .into_iter()
454 .map(|server| {
455 (
456 server.id as usize,
457 LanguageServerStatus {
458 name: server.name,
459 pending_work: Default::default(),
460 pending_diagnostic_updates: 0,
461 },
462 )
463 })
464 .collect(),
465 last_workspace_edits_by_language_server: Default::default(),
466 next_language_server_id: 0,
467 opened_buffers: Default::default(),
468 buffer_snapshots: Default::default(),
469 nonce: StdRng::from_entropy().gen(),
470 };
471 for worktree in worktrees {
472 this.add_worktree(&worktree, cx);
473 }
474 this
475 });
476
477 let user_ids = response
478 .collaborators
479 .iter()
480 .map(|peer| peer.user_id)
481 .collect();
482 user_store
483 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
484 .await?;
485 let mut collaborators = HashMap::default();
486 for message in response.collaborators {
487 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
488 collaborators.insert(collaborator.peer_id, collaborator);
489 }
490
491 this.update(cx, |this, _| {
492 this.collaborators = collaborators;
493 });
494
495 Ok(this)
496 }
497
498 #[cfg(any(test, feature = "test-support"))]
499 pub async fn test(
500 fs: Arc<dyn Fs>,
501 root_paths: impl IntoIterator<Item = &Path>,
502 cx: &mut gpui::TestAppContext,
503 ) -> ModelHandle<Project> {
504 let languages = Arc::new(LanguageRegistry::test());
505 let http_client = client::test::FakeHttpClient::with_404_response();
506 let client = client::Client::new(http_client.clone());
507 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
508 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
509 for path in root_paths {
510 let (tree, _) = project
511 .update(cx, |project, cx| {
512 project.find_or_create_local_worktree(path, true, cx)
513 })
514 .await
515 .unwrap();
516 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
517 .await;
518 }
519 project
520 }
521
522 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
523 self.opened_buffers
524 .get(&remote_id)
525 .and_then(|buffer| buffer.upgrade(cx))
526 }
527
528 pub fn languages(&self) -> &Arc<LanguageRegistry> {
529 &self.languages
530 }
531
532 pub fn client(&self) -> Arc<Client> {
533 self.client.clone()
534 }
535
536 pub fn user_store(&self) -> ModelHandle<UserStore> {
537 self.user_store.clone()
538 }
539
540 #[cfg(any(test, feature = "test-support"))]
541 pub fn check_invariants(&self, cx: &AppContext) {
542 if self.is_local() {
543 let mut worktree_root_paths = HashMap::default();
544 for worktree in self.worktrees(cx) {
545 let worktree = worktree.read(cx);
546 let abs_path = worktree.as_local().unwrap().abs_path().clone();
547 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
548 assert_eq!(
549 prev_worktree_id,
550 None,
551 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
552 abs_path,
553 worktree.id(),
554 prev_worktree_id
555 )
556 }
557 } else {
558 let replica_id = self.replica_id();
559 for buffer in self.opened_buffers.values() {
560 if let Some(buffer) = buffer.upgrade(cx) {
561 let buffer = buffer.read(cx);
562 assert_eq!(
563 buffer.deferred_ops_len(),
564 0,
565 "replica {}, buffer {} has deferred operations",
566 replica_id,
567 buffer.remote_id()
568 );
569 }
570 }
571 }
572 }
573
574 #[cfg(any(test, feature = "test-support"))]
575 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
576 let path = path.into();
577 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
578 self.opened_buffers.iter().any(|(_, buffer)| {
579 if let Some(buffer) = buffer.upgrade(cx) {
580 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
581 if file.worktree == worktree && file.path() == &path.path {
582 return true;
583 }
584 }
585 }
586 false
587 })
588 } else {
589 false
590 }
591 }
592
593 pub fn fs(&self) -> &Arc<dyn Fs> {
594 &self.fs
595 }
596
597 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
598 self.unshared(cx);
599 for worktree in &self.worktrees {
600 if let Some(worktree) = worktree.upgrade(cx) {
601 worktree.update(cx, |worktree, _| {
602 worktree.as_local_mut().unwrap().unregister();
603 });
604 }
605 }
606
607 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
608 *remote_id_tx.borrow_mut() = None;
609 }
610
611 self.subscriptions.clear();
612 }
613
614 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
615 self.unregister(cx);
616
617 let response = self.client.request(proto::RegisterProject {});
618 cx.spawn(|this, mut cx| async move {
619 let remote_id = response.await?.project_id;
620
621 let mut registrations = Vec::new();
622 this.update(&mut cx, |this, cx| {
623 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
624 *remote_id_tx.borrow_mut() = Some(remote_id);
625 }
626
627 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
628
629 this.subscriptions
630 .push(this.client.add_model_for_remote_entity(remote_id, cx));
631
632 for worktree in &this.worktrees {
633 if let Some(worktree) = worktree.upgrade(cx) {
634 registrations.push(worktree.update(cx, |worktree, cx| {
635 let worktree = worktree.as_local_mut().unwrap();
636 worktree.register(remote_id, cx)
637 }));
638 }
639 }
640 });
641
642 futures::future::try_join_all(registrations).await?;
643 Ok(())
644 })
645 }
646
647 pub fn remote_id(&self) -> Option<u64> {
648 match &self.client_state {
649 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
650 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
651 }
652 }
653
654 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
655 let mut id = None;
656 let mut watch = None;
657 match &self.client_state {
658 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
659 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
660 }
661
662 async move {
663 if let Some(id) = id {
664 return id;
665 }
666 let mut watch = watch.unwrap();
667 loop {
668 let id = *watch.borrow();
669 if let Some(id) = id {
670 return id;
671 }
672 watch.next().await;
673 }
674 }
675 }
676
677 pub fn replica_id(&self) -> ReplicaId {
678 match &self.client_state {
679 ProjectClientState::Local { .. } => 0,
680 ProjectClientState::Remote { replica_id, .. } => *replica_id,
681 }
682 }
683
684 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
685 &self.collaborators
686 }
687
688 pub fn worktrees<'a>(
689 &'a self,
690 cx: &'a AppContext,
691 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
692 self.worktrees
693 .iter()
694 .filter_map(move |worktree| worktree.upgrade(cx))
695 }
696
697 pub fn visible_worktrees<'a>(
698 &'a self,
699 cx: &'a AppContext,
700 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
701 self.worktrees.iter().filter_map(|worktree| {
702 worktree.upgrade(cx).and_then(|worktree| {
703 if worktree.read(cx).is_visible() {
704 Some(worktree)
705 } else {
706 None
707 }
708 })
709 })
710 }
711
712 pub fn worktree_for_id(
713 &self,
714 id: WorktreeId,
715 cx: &AppContext,
716 ) -> Option<ModelHandle<Worktree>> {
717 self.worktrees(cx)
718 .find(|worktree| worktree.read(cx).id() == id)
719 }
720
721 pub fn worktree_for_entry(
722 &self,
723 entry_id: ProjectEntryId,
724 cx: &AppContext,
725 ) -> Option<ModelHandle<Worktree>> {
726 self.worktrees(cx)
727 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
728 }
729
730 pub fn worktree_id_for_entry(
731 &self,
732 entry_id: ProjectEntryId,
733 cx: &AppContext,
734 ) -> Option<WorktreeId> {
735 self.worktree_for_entry(entry_id, cx)
736 .map(|worktree| worktree.read(cx).id())
737 }
738
739 pub fn create_entry(
740 &mut self,
741 project_path: impl Into<ProjectPath>,
742 is_directory: bool,
743 cx: &mut ModelContext<Self>,
744 ) -> Option<Task<Result<Entry>>> {
745 let project_path = project_path.into();
746 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
747 if self.is_local() {
748 Some(worktree.update(cx, |worktree, cx| {
749 worktree
750 .as_local_mut()
751 .unwrap()
752 .create_entry(project_path.path, is_directory, cx)
753 }))
754 } else {
755 let client = self.client.clone();
756 let project_id = self.remote_id().unwrap();
757 Some(cx.spawn_weak(|_, mut cx| async move {
758 let response = client
759 .request(proto::CreateProjectEntry {
760 worktree_id: project_path.worktree_id.to_proto(),
761 project_id,
762 path: project_path.path.as_os_str().as_bytes().to_vec(),
763 is_directory,
764 })
765 .await?;
766 let entry = response
767 .entry
768 .ok_or_else(|| anyhow!("missing entry in response"))?;
769 worktree
770 .update(&mut cx, |worktree, cx| {
771 worktree.as_remote().unwrap().insert_entry(
772 entry,
773 response.worktree_scan_id as usize,
774 cx,
775 )
776 })
777 .await
778 }))
779 }
780 }
781
782 pub fn rename_entry(
783 &mut self,
784 entry_id: ProjectEntryId,
785 new_path: impl Into<Arc<Path>>,
786 cx: &mut ModelContext<Self>,
787 ) -> Option<Task<Result<Entry>>> {
788 let worktree = self.worktree_for_entry(entry_id, cx)?;
789 let new_path = new_path.into();
790 if self.is_local() {
791 worktree.update(cx, |worktree, cx| {
792 worktree
793 .as_local_mut()
794 .unwrap()
795 .rename_entry(entry_id, new_path, cx)
796 })
797 } else {
798 let client = self.client.clone();
799 let project_id = self.remote_id().unwrap();
800
801 Some(cx.spawn_weak(|_, mut cx| async move {
802 let response = client
803 .request(proto::RenameProjectEntry {
804 project_id,
805 entry_id: entry_id.to_proto(),
806 new_path: new_path.as_os_str().as_bytes().to_vec(),
807 })
808 .await?;
809 let entry = response
810 .entry
811 .ok_or_else(|| anyhow!("missing entry in response"))?;
812 worktree
813 .update(&mut cx, |worktree, cx| {
814 worktree.as_remote().unwrap().insert_entry(
815 entry,
816 response.worktree_scan_id as usize,
817 cx,
818 )
819 })
820 .await
821 }))
822 }
823 }
824
825 pub fn delete_entry(
826 &mut self,
827 entry_id: ProjectEntryId,
828 cx: &mut ModelContext<Self>,
829 ) -> Option<Task<Result<()>>> {
830 let worktree = self.worktree_for_entry(entry_id, cx)?;
831 if self.is_local() {
832 worktree.update(cx, |worktree, cx| {
833 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
834 })
835 } else {
836 let client = self.client.clone();
837 let project_id = self.remote_id().unwrap();
838 Some(cx.spawn_weak(|_, mut cx| async move {
839 let response = client
840 .request(proto::DeleteProjectEntry {
841 project_id,
842 entry_id: entry_id.to_proto(),
843 })
844 .await?;
845 worktree
846 .update(&mut cx, move |worktree, cx| {
847 worktree.as_remote().unwrap().delete_entry(
848 entry_id,
849 response.worktree_scan_id as usize,
850 cx,
851 )
852 })
853 .await
854 }))
855 }
856 }
857
858 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
859 let project_id;
860 if let ProjectClientState::Local {
861 remote_id_rx,
862 is_shared,
863 ..
864 } = &mut self.client_state
865 {
866 if *is_shared {
867 return Task::ready(Ok(()));
868 }
869 *is_shared = true;
870 if let Some(id) = *remote_id_rx.borrow() {
871 project_id = id;
872 } else {
873 return Task::ready(Err(anyhow!("project hasn't been registered")));
874 }
875 } else {
876 return Task::ready(Err(anyhow!("can't share a remote project")));
877 };
878
879 for open_buffer in self.opened_buffers.values_mut() {
880 match open_buffer {
881 OpenBuffer::Strong(_) => {}
882 OpenBuffer::Weak(buffer) => {
883 if let Some(buffer) = buffer.upgrade(cx) {
884 *open_buffer = OpenBuffer::Strong(buffer);
885 }
886 }
887 OpenBuffer::Loading(_) => unreachable!(),
888 }
889 }
890
891 for worktree_handle in self.worktrees.iter_mut() {
892 match worktree_handle {
893 WorktreeHandle::Strong(_) => {}
894 WorktreeHandle::Weak(worktree) => {
895 if let Some(worktree) = worktree.upgrade(cx) {
896 *worktree_handle = WorktreeHandle::Strong(worktree);
897 }
898 }
899 }
900 }
901
902 let mut tasks = Vec::new();
903 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
904 worktree.update(cx, |worktree, cx| {
905 let worktree = worktree.as_local_mut().unwrap();
906 tasks.push(worktree.share(project_id, cx));
907 });
908 }
909
910 cx.spawn(|this, mut cx| async move {
911 for task in tasks {
912 task.await?;
913 }
914 this.update(&mut cx, |_, cx| cx.notify());
915 Ok(())
916 })
917 }
918
919 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
920 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
921 if !*is_shared {
922 return;
923 }
924
925 *is_shared = false;
926 self.collaborators.clear();
927 self.shared_buffers.clear();
928 for worktree_handle in self.worktrees.iter_mut() {
929 if let WorktreeHandle::Strong(worktree) = worktree_handle {
930 let is_visible = worktree.update(cx, |worktree, _| {
931 worktree.as_local_mut().unwrap().unshare();
932 worktree.is_visible()
933 });
934 if !is_visible {
935 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
936 }
937 }
938 }
939
940 for open_buffer in self.opened_buffers.values_mut() {
941 match open_buffer {
942 OpenBuffer::Strong(buffer) => {
943 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
944 }
945 _ => {}
946 }
947 }
948
949 cx.notify();
950 } else {
951 log::error!("attempted to unshare a remote project");
952 }
953 }
954
955 pub fn respond_to_join_request(
956 &mut self,
957 requester_id: u64,
958 allow: bool,
959 cx: &mut ModelContext<Self>,
960 ) {
961 if let Some(project_id) = self.remote_id() {
962 let share = self.share(cx);
963 let client = self.client.clone();
964 cx.foreground()
965 .spawn(async move {
966 share.await?;
967 client.send(proto::RespondToJoinProjectRequest {
968 requester_id,
969 project_id,
970 allow,
971 })
972 })
973 .detach_and_log_err(cx);
974 }
975 }
976
977 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
978 if let ProjectClientState::Remote {
979 sharing_has_stopped,
980 ..
981 } = &mut self.client_state
982 {
983 *sharing_has_stopped = true;
984 self.collaborators.clear();
985 cx.notify();
986 }
987 }
988
989 pub fn is_read_only(&self) -> bool {
990 match &self.client_state {
991 ProjectClientState::Local { .. } => false,
992 ProjectClientState::Remote {
993 sharing_has_stopped,
994 ..
995 } => *sharing_has_stopped,
996 }
997 }
998
999 pub fn is_local(&self) -> bool {
1000 match &self.client_state {
1001 ProjectClientState::Local { .. } => true,
1002 ProjectClientState::Remote { .. } => false,
1003 }
1004 }
1005
1006 pub fn is_remote(&self) -> bool {
1007 !self.is_local()
1008 }
1009
1010 pub fn create_buffer(
1011 &mut self,
1012 text: &str,
1013 language: Option<Arc<Language>>,
1014 cx: &mut ModelContext<Self>,
1015 ) -> Result<ModelHandle<Buffer>> {
1016 if self.is_remote() {
1017 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1018 }
1019
1020 let buffer = cx.add_model(|cx| {
1021 Buffer::new(self.replica_id(), text, cx)
1022 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1023 });
1024 self.register_buffer(&buffer, cx)?;
1025 Ok(buffer)
1026 }
1027
1028 pub fn open_path(
1029 &mut self,
1030 path: impl Into<ProjectPath>,
1031 cx: &mut ModelContext<Self>,
1032 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1033 let task = self.open_buffer(path, cx);
1034 cx.spawn_weak(|_, cx| async move {
1035 let buffer = task.await?;
1036 let project_entry_id = buffer
1037 .read_with(&cx, |buffer, cx| {
1038 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1039 })
1040 .ok_or_else(|| anyhow!("no project entry"))?;
1041 Ok((project_entry_id, buffer.into()))
1042 })
1043 }
1044
1045 pub fn open_local_buffer(
1046 &mut self,
1047 abs_path: impl AsRef<Path>,
1048 cx: &mut ModelContext<Self>,
1049 ) -> Task<Result<ModelHandle<Buffer>>> {
1050 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1051 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1052 } else {
1053 Task::ready(Err(anyhow!("no such path")))
1054 }
1055 }
1056
1057 pub fn open_buffer(
1058 &mut self,
1059 path: impl Into<ProjectPath>,
1060 cx: &mut ModelContext<Self>,
1061 ) -> Task<Result<ModelHandle<Buffer>>> {
1062 let project_path = path.into();
1063 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1064 worktree
1065 } else {
1066 return Task::ready(Err(anyhow!("no such worktree")));
1067 };
1068
1069 // If there is already a buffer for the given path, then return it.
1070 let existing_buffer = self.get_open_buffer(&project_path, cx);
1071 if let Some(existing_buffer) = existing_buffer {
1072 return Task::ready(Ok(existing_buffer));
1073 }
1074
1075 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1076 // If the given path is already being loaded, then wait for that existing
1077 // task to complete and return the same buffer.
1078 hash_map::Entry::Occupied(e) => e.get().clone(),
1079
1080 // Otherwise, record the fact that this path is now being loaded.
1081 hash_map::Entry::Vacant(entry) => {
1082 let (mut tx, rx) = postage::watch::channel();
1083 entry.insert(rx.clone());
1084
1085 let load_buffer = if worktree.read(cx).is_local() {
1086 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1087 } else {
1088 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1089 };
1090
1091 cx.spawn(move |this, mut cx| async move {
1092 let load_result = load_buffer.await;
1093 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1094 // Record the fact that the buffer is no longer loading.
1095 this.loading_buffers.remove(&project_path);
1096 let buffer = load_result.map_err(Arc::new)?;
1097 Ok(buffer)
1098 }));
1099 })
1100 .detach();
1101 rx
1102 }
1103 };
1104
1105 cx.foreground().spawn(async move {
1106 loop {
1107 if let Some(result) = loading_watch.borrow().as_ref() {
1108 match result {
1109 Ok(buffer) => return Ok(buffer.clone()),
1110 Err(error) => return Err(anyhow!("{}", error)),
1111 }
1112 }
1113 loading_watch.next().await;
1114 }
1115 })
1116 }
1117
1118 fn open_local_buffer_internal(
1119 &mut self,
1120 path: &Arc<Path>,
1121 worktree: &ModelHandle<Worktree>,
1122 cx: &mut ModelContext<Self>,
1123 ) -> Task<Result<ModelHandle<Buffer>>> {
1124 let load_buffer = worktree.update(cx, |worktree, cx| {
1125 let worktree = worktree.as_local_mut().unwrap();
1126 worktree.load_buffer(path, cx)
1127 });
1128 cx.spawn(|this, mut cx| async move {
1129 let buffer = load_buffer.await?;
1130 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1131 Ok(buffer)
1132 })
1133 }
1134
1135 fn open_remote_buffer_internal(
1136 &mut self,
1137 path: &Arc<Path>,
1138 worktree: &ModelHandle<Worktree>,
1139 cx: &mut ModelContext<Self>,
1140 ) -> Task<Result<ModelHandle<Buffer>>> {
1141 let rpc = self.client.clone();
1142 let project_id = self.remote_id().unwrap();
1143 let remote_worktree_id = worktree.read(cx).id();
1144 let path = path.clone();
1145 let path_string = path.to_string_lossy().to_string();
1146 cx.spawn(|this, mut cx| async move {
1147 let response = rpc
1148 .request(proto::OpenBufferByPath {
1149 project_id,
1150 worktree_id: remote_worktree_id.to_proto(),
1151 path: path_string,
1152 })
1153 .await?;
1154 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1155 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1156 .await
1157 })
1158 }
1159
1160 fn open_local_buffer_via_lsp(
1161 &mut self,
1162 abs_path: lsp::Url,
1163 lsp_adapter: Arc<dyn LspAdapter>,
1164 lsp_server: Arc<LanguageServer>,
1165 cx: &mut ModelContext<Self>,
1166 ) -> Task<Result<ModelHandle<Buffer>>> {
1167 cx.spawn(|this, mut cx| async move {
1168 let abs_path = abs_path
1169 .to_file_path()
1170 .map_err(|_| anyhow!("can't convert URI to path"))?;
1171 let (worktree, relative_path) = if let Some(result) =
1172 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1173 {
1174 result
1175 } else {
1176 let worktree = this
1177 .update(&mut cx, |this, cx| {
1178 this.create_local_worktree(&abs_path, false, cx)
1179 })
1180 .await?;
1181 this.update(&mut cx, |this, cx| {
1182 this.language_servers.insert(
1183 (worktree.read(cx).id(), lsp_adapter.name()),
1184 (lsp_adapter, lsp_server),
1185 );
1186 });
1187 (worktree, PathBuf::new())
1188 };
1189
1190 let project_path = ProjectPath {
1191 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1192 path: relative_path.into(),
1193 };
1194 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1195 .await
1196 })
1197 }
1198
1199 pub fn open_buffer_by_id(
1200 &mut self,
1201 id: u64,
1202 cx: &mut ModelContext<Self>,
1203 ) -> Task<Result<ModelHandle<Buffer>>> {
1204 if let Some(buffer) = self.buffer_for_id(id, cx) {
1205 Task::ready(Ok(buffer))
1206 } else if self.is_local() {
1207 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1208 } else if let Some(project_id) = self.remote_id() {
1209 let request = self
1210 .client
1211 .request(proto::OpenBufferById { project_id, id });
1212 cx.spawn(|this, mut cx| async move {
1213 let buffer = request
1214 .await?
1215 .buffer
1216 .ok_or_else(|| anyhow!("invalid buffer"))?;
1217 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1218 .await
1219 })
1220 } else {
1221 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1222 }
1223 }
1224
1225 pub fn save_buffer_as(
1226 &mut self,
1227 buffer: ModelHandle<Buffer>,
1228 abs_path: PathBuf,
1229 cx: &mut ModelContext<Project>,
1230 ) -> Task<Result<()>> {
1231 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1232 let old_path =
1233 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1234 cx.spawn(|this, mut cx| async move {
1235 if let Some(old_path) = old_path {
1236 this.update(&mut cx, |this, cx| {
1237 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1238 });
1239 }
1240 let (worktree, path) = worktree_task.await?;
1241 worktree
1242 .update(&mut cx, |worktree, cx| {
1243 worktree
1244 .as_local_mut()
1245 .unwrap()
1246 .save_buffer_as(buffer.clone(), path, cx)
1247 })
1248 .await?;
1249 this.update(&mut cx, |this, cx| {
1250 this.assign_language_to_buffer(&buffer, cx);
1251 this.register_buffer_with_language_server(&buffer, cx);
1252 });
1253 Ok(())
1254 })
1255 }
1256
1257 pub fn get_open_buffer(
1258 &mut self,
1259 path: &ProjectPath,
1260 cx: &mut ModelContext<Self>,
1261 ) -> Option<ModelHandle<Buffer>> {
1262 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1263 self.opened_buffers.values().find_map(|buffer| {
1264 let buffer = buffer.upgrade(cx)?;
1265 let file = File::from_dyn(buffer.read(cx).file())?;
1266 if file.worktree == worktree && file.path() == &path.path {
1267 Some(buffer)
1268 } else {
1269 None
1270 }
1271 })
1272 }
1273
1274 fn register_buffer(
1275 &mut self,
1276 buffer: &ModelHandle<Buffer>,
1277 cx: &mut ModelContext<Self>,
1278 ) -> Result<()> {
1279 let remote_id = buffer.read(cx).remote_id();
1280 let open_buffer = if self.is_remote() || self.is_shared() {
1281 OpenBuffer::Strong(buffer.clone())
1282 } else {
1283 OpenBuffer::Weak(buffer.downgrade())
1284 };
1285
1286 match self.opened_buffers.insert(remote_id, open_buffer) {
1287 None => {}
1288 Some(OpenBuffer::Loading(operations)) => {
1289 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1290 }
1291 Some(OpenBuffer::Weak(existing_handle)) => {
1292 if existing_handle.upgrade(cx).is_some() {
1293 Err(anyhow!(
1294 "already registered buffer with remote id {}",
1295 remote_id
1296 ))?
1297 }
1298 }
1299 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1300 "already registered buffer with remote id {}",
1301 remote_id
1302 ))?,
1303 }
1304 cx.subscribe(buffer, |this, buffer, event, cx| {
1305 this.on_buffer_event(buffer, event, cx);
1306 })
1307 .detach();
1308
1309 self.assign_language_to_buffer(buffer, cx);
1310 self.register_buffer_with_language_server(buffer, cx);
1311 cx.observe_release(buffer, |this, buffer, cx| {
1312 if let Some(file) = File::from_dyn(buffer.file()) {
1313 if file.is_local() {
1314 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1315 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1316 server
1317 .notify::<lsp::notification::DidCloseTextDocument>(
1318 lsp::DidCloseTextDocumentParams {
1319 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1320 },
1321 )
1322 .log_err();
1323 }
1324 }
1325 }
1326 })
1327 .detach();
1328
1329 Ok(())
1330 }
1331
1332 fn register_buffer_with_language_server(
1333 &mut self,
1334 buffer_handle: &ModelHandle<Buffer>,
1335 cx: &mut ModelContext<Self>,
1336 ) {
1337 let buffer = buffer_handle.read(cx);
1338 let buffer_id = buffer.remote_id();
1339 if let Some(file) = File::from_dyn(buffer.file()) {
1340 if file.is_local() {
1341 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1342 let initial_snapshot = buffer.text_snapshot();
1343
1344 let mut language_server = None;
1345 let mut language_id = None;
1346 if let Some(language) = buffer.language() {
1347 let worktree_id = file.worktree_id(cx);
1348 if let Some(adapter) = language.lsp_adapter() {
1349 language_id = adapter.id_for_language(language.name().as_ref());
1350 language_server = self
1351 .language_servers
1352 .get(&(worktree_id, adapter.name()))
1353 .cloned();
1354 }
1355 }
1356
1357 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1358 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1359 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1360 .log_err();
1361 }
1362 }
1363
1364 if let Some((_, server)) = language_server {
1365 server
1366 .notify::<lsp::notification::DidOpenTextDocument>(
1367 lsp::DidOpenTextDocumentParams {
1368 text_document: lsp::TextDocumentItem::new(
1369 uri,
1370 language_id.unwrap_or_default(),
1371 0,
1372 initial_snapshot.text(),
1373 ),
1374 }
1375 .clone(),
1376 )
1377 .log_err();
1378 buffer_handle.update(cx, |buffer, cx| {
1379 buffer.set_completion_triggers(
1380 server
1381 .capabilities()
1382 .completion_provider
1383 .as_ref()
1384 .and_then(|provider| provider.trigger_characters.clone())
1385 .unwrap_or(Vec::new()),
1386 cx,
1387 )
1388 });
1389 self.buffer_snapshots
1390 .insert(buffer_id, vec![(0, initial_snapshot)]);
1391 }
1392 }
1393 }
1394 }
1395
1396 fn unregister_buffer_from_language_server(
1397 &mut self,
1398 buffer: &ModelHandle<Buffer>,
1399 old_path: PathBuf,
1400 cx: &mut ModelContext<Self>,
1401 ) {
1402 buffer.update(cx, |buffer, cx| {
1403 buffer.update_diagnostics(Default::default(), cx);
1404 self.buffer_snapshots.remove(&buffer.remote_id());
1405 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1406 language_server
1407 .notify::<lsp::notification::DidCloseTextDocument>(
1408 lsp::DidCloseTextDocumentParams {
1409 text_document: lsp::TextDocumentIdentifier::new(
1410 lsp::Url::from_file_path(old_path).unwrap(),
1411 ),
1412 },
1413 )
1414 .log_err();
1415 }
1416 });
1417 }
1418
1419 fn on_buffer_event(
1420 &mut self,
1421 buffer: ModelHandle<Buffer>,
1422 event: &BufferEvent,
1423 cx: &mut ModelContext<Self>,
1424 ) -> Option<()> {
1425 match event {
1426 BufferEvent::Operation(operation) => {
1427 let project_id = self.remote_id()?;
1428 let request = self.client.request(proto::UpdateBuffer {
1429 project_id,
1430 buffer_id: buffer.read(cx).remote_id(),
1431 operations: vec![language::proto::serialize_operation(&operation)],
1432 });
1433 cx.background().spawn(request).detach_and_log_err(cx);
1434 }
1435 BufferEvent::Edited { .. } => {
1436 let (_, language_server) = self
1437 .language_server_for_buffer(buffer.read(cx), cx)?
1438 .clone();
1439 let buffer = buffer.read(cx);
1440 let file = File::from_dyn(buffer.file())?;
1441 let abs_path = file.as_local()?.abs_path(cx);
1442 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1443 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1444 let (version, prev_snapshot) = buffer_snapshots.last()?;
1445 let next_snapshot = buffer.text_snapshot();
1446 let next_version = version + 1;
1447
1448 let content_changes = buffer
1449 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1450 .map(|edit| {
1451 let edit_start = edit.new.start.0;
1452 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1453 let new_text = next_snapshot
1454 .text_for_range(edit.new.start.1..edit.new.end.1)
1455 .collect();
1456 lsp::TextDocumentContentChangeEvent {
1457 range: Some(lsp::Range::new(
1458 point_to_lsp(edit_start),
1459 point_to_lsp(edit_end),
1460 )),
1461 range_length: None,
1462 text: new_text,
1463 }
1464 })
1465 .collect();
1466
1467 buffer_snapshots.push((next_version, next_snapshot));
1468
1469 language_server
1470 .notify::<lsp::notification::DidChangeTextDocument>(
1471 lsp::DidChangeTextDocumentParams {
1472 text_document: lsp::VersionedTextDocumentIdentifier::new(
1473 uri,
1474 next_version,
1475 ),
1476 content_changes,
1477 },
1478 )
1479 .log_err();
1480 }
1481 BufferEvent::Saved => {
1482 let file = File::from_dyn(buffer.read(cx).file())?;
1483 let worktree_id = file.worktree_id(cx);
1484 let abs_path = file.as_local()?.abs_path(cx);
1485 let text_document = lsp::TextDocumentIdentifier {
1486 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1487 };
1488
1489 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1490 server
1491 .notify::<lsp::notification::DidSaveTextDocument>(
1492 lsp::DidSaveTextDocumentParams {
1493 text_document: text_document.clone(),
1494 text: None,
1495 },
1496 )
1497 .log_err();
1498 }
1499 }
1500 _ => {}
1501 }
1502
1503 None
1504 }
1505
1506 fn language_servers_for_worktree(
1507 &self,
1508 worktree_id: WorktreeId,
1509 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1510 self.language_servers.iter().filter_map(
1511 move |((language_server_worktree_id, _), server)| {
1512 if *language_server_worktree_id == worktree_id {
1513 Some(server)
1514 } else {
1515 None
1516 }
1517 },
1518 )
1519 }
1520
1521 fn assign_language_to_buffer(
1522 &mut self,
1523 buffer: &ModelHandle<Buffer>,
1524 cx: &mut ModelContext<Self>,
1525 ) -> Option<()> {
1526 // If the buffer has a language, set it and start the language server if we haven't already.
1527 let full_path = buffer.read(cx).file()?.full_path(cx);
1528 let language = self.languages.select_language(&full_path)?;
1529 buffer.update(cx, |buffer, cx| {
1530 buffer.set_language(Some(language.clone()), cx);
1531 });
1532
1533 let file = File::from_dyn(buffer.read(cx).file())?;
1534 let worktree = file.worktree.read(cx).as_local()?;
1535 let worktree_id = worktree.id();
1536 let worktree_abs_path = worktree.abs_path().clone();
1537 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1538
1539 None
1540 }
1541
1542 fn start_language_server(
1543 &mut self,
1544 worktree_id: WorktreeId,
1545 worktree_path: Arc<Path>,
1546 language: Arc<Language>,
1547 cx: &mut ModelContext<Self>,
1548 ) {
1549 let adapter = if let Some(adapter) = language.lsp_adapter() {
1550 adapter
1551 } else {
1552 return;
1553 };
1554 let key = (worktree_id, adapter.name());
1555 self.started_language_servers
1556 .entry(key.clone())
1557 .or_insert_with(|| {
1558 let server_id = post_inc(&mut self.next_language_server_id);
1559 let language_server = self.languages.start_language_server(
1560 server_id,
1561 language.clone(),
1562 worktree_path,
1563 self.client.http_client(),
1564 cx,
1565 );
1566 cx.spawn_weak(|this, mut cx| async move {
1567 let language_server = language_server?.await.log_err()?;
1568 let language_server = language_server
1569 .initialize(adapter.initialization_options())
1570 .await
1571 .log_err()?;
1572 let this = this.upgrade(&cx)?;
1573 let disk_based_diagnostics_progress_token =
1574 adapter.disk_based_diagnostics_progress_token();
1575
1576 language_server
1577 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1578 let this = this.downgrade();
1579 let adapter = adapter.clone();
1580 move |params, mut cx| {
1581 if let Some(this) = this.upgrade(&cx) {
1582 this.update(&mut cx, |this, cx| {
1583 this.on_lsp_diagnostics_published(
1584 server_id,
1585 params,
1586 &adapter,
1587 disk_based_diagnostics_progress_token,
1588 cx,
1589 );
1590 });
1591 }
1592 }
1593 })
1594 .detach();
1595
1596 language_server
1597 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1598 let settings = this
1599 .read_with(&cx, |this, _| this.language_server_settings.clone());
1600 move |params, _| {
1601 let settings = settings.lock().clone();
1602 async move {
1603 Ok(params
1604 .items
1605 .into_iter()
1606 .map(|item| {
1607 if let Some(section) = &item.section {
1608 settings
1609 .get(section)
1610 .cloned()
1611 .unwrap_or(serde_json::Value::Null)
1612 } else {
1613 settings.clone()
1614 }
1615 })
1616 .collect())
1617 }
1618 }
1619 })
1620 .detach();
1621
1622 language_server
1623 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1624 let this = this.downgrade();
1625 let adapter = adapter.clone();
1626 let language_server = language_server.clone();
1627 move |params, cx| {
1628 Self::on_lsp_workspace_edit(
1629 this,
1630 params,
1631 server_id,
1632 adapter.clone(),
1633 language_server.clone(),
1634 cx,
1635 )
1636 }
1637 })
1638 .detach();
1639
1640 language_server
1641 .on_notification::<lsp::notification::Progress, _>({
1642 let this = this.downgrade();
1643 move |params, mut cx| {
1644 if let Some(this) = this.upgrade(&cx) {
1645 this.update(&mut cx, |this, cx| {
1646 this.on_lsp_progress(
1647 params,
1648 server_id,
1649 disk_based_diagnostics_progress_token,
1650 cx,
1651 );
1652 });
1653 }
1654 }
1655 })
1656 .detach();
1657
1658 this.update(&mut cx, |this, cx| {
1659 this.language_servers
1660 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1661 this.language_server_statuses.insert(
1662 server_id,
1663 LanguageServerStatus {
1664 name: language_server.name().to_string(),
1665 pending_work: Default::default(),
1666 pending_diagnostic_updates: 0,
1667 },
1668 );
1669 language_server
1670 .notify::<lsp::notification::DidChangeConfiguration>(
1671 lsp::DidChangeConfigurationParams {
1672 settings: this.language_server_settings.lock().clone(),
1673 },
1674 )
1675 .ok();
1676
1677 if let Some(project_id) = this.remote_id() {
1678 this.client
1679 .send(proto::StartLanguageServer {
1680 project_id,
1681 server: Some(proto::LanguageServer {
1682 id: server_id as u64,
1683 name: language_server.name().to_string(),
1684 }),
1685 })
1686 .log_err();
1687 }
1688
1689 // Tell the language server about every open buffer in the worktree that matches the language.
1690 for buffer in this.opened_buffers.values() {
1691 if let Some(buffer_handle) = buffer.upgrade(cx) {
1692 let buffer = buffer_handle.read(cx);
1693 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1694 file
1695 } else {
1696 continue;
1697 };
1698 let language = if let Some(language) = buffer.language() {
1699 language
1700 } else {
1701 continue;
1702 };
1703 if file.worktree.read(cx).id() != key.0
1704 || language.lsp_adapter().map(|a| a.name())
1705 != Some(key.1.clone())
1706 {
1707 continue;
1708 }
1709
1710 let file = file.as_local()?;
1711 let versions = this
1712 .buffer_snapshots
1713 .entry(buffer.remote_id())
1714 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1715 let (version, initial_snapshot) = versions.last().unwrap();
1716 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1717 let language_id = adapter.id_for_language(language.name().as_ref());
1718 language_server
1719 .notify::<lsp::notification::DidOpenTextDocument>(
1720 lsp::DidOpenTextDocumentParams {
1721 text_document: lsp::TextDocumentItem::new(
1722 uri,
1723 language_id.unwrap_or_default(),
1724 *version,
1725 initial_snapshot.text(),
1726 ),
1727 },
1728 )
1729 .log_err()?;
1730 buffer_handle.update(cx, |buffer, cx| {
1731 buffer.set_completion_triggers(
1732 language_server
1733 .capabilities()
1734 .completion_provider
1735 .as_ref()
1736 .and_then(|provider| {
1737 provider.trigger_characters.clone()
1738 })
1739 .unwrap_or(Vec::new()),
1740 cx,
1741 )
1742 });
1743 }
1744 }
1745
1746 cx.notify();
1747 Some(())
1748 });
1749
1750 Some(language_server)
1751 })
1752 });
1753 }
1754
1755 pub fn restart_language_servers_for_buffers(
1756 &mut self,
1757 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1758 cx: &mut ModelContext<Self>,
1759 ) -> Option<()> {
1760 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1761 .into_iter()
1762 .filter_map(|buffer| {
1763 let file = File::from_dyn(buffer.read(cx).file())?;
1764 let worktree = file.worktree.read(cx).as_local()?;
1765 let worktree_id = worktree.id();
1766 let worktree_abs_path = worktree.abs_path().clone();
1767 let full_path = file.full_path(cx);
1768 Some((worktree_id, worktree_abs_path, full_path))
1769 })
1770 .collect();
1771 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1772 let language = self.languages.select_language(&full_path)?;
1773 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1774 }
1775
1776 None
1777 }
1778
1779 fn restart_language_server(
1780 &mut self,
1781 worktree_id: WorktreeId,
1782 worktree_path: Arc<Path>,
1783 language: Arc<Language>,
1784 cx: &mut ModelContext<Self>,
1785 ) {
1786 let adapter = if let Some(adapter) = language.lsp_adapter() {
1787 adapter
1788 } else {
1789 return;
1790 };
1791 let key = (worktree_id, adapter.name());
1792 let server_to_shutdown = self.language_servers.remove(&key);
1793 self.started_language_servers.remove(&key);
1794 server_to_shutdown
1795 .as_ref()
1796 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1797 cx.spawn_weak(|this, mut cx| async move {
1798 if let Some(this) = this.upgrade(&cx) {
1799 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1800 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1801 shutdown_task.await;
1802 }
1803 }
1804
1805 this.update(&mut cx, |this, cx| {
1806 this.start_language_server(worktree_id, worktree_path, language, cx);
1807 });
1808 }
1809 })
1810 .detach();
1811 }
1812
1813 fn on_lsp_diagnostics_published(
1814 &mut self,
1815 server_id: usize,
1816 mut params: lsp::PublishDiagnosticsParams,
1817 adapter: &Arc<dyn LspAdapter>,
1818 disk_based_diagnostics_progress_token: Option<&str>,
1819 cx: &mut ModelContext<Self>,
1820 ) {
1821 adapter.process_diagnostics(&mut params);
1822 if disk_based_diagnostics_progress_token.is_none() {
1823 self.disk_based_diagnostics_started(cx);
1824 self.broadcast_language_server_update(
1825 server_id,
1826 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1827 proto::LspDiskBasedDiagnosticsUpdating {},
1828 ),
1829 );
1830 }
1831 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1832 .log_err();
1833 if disk_based_diagnostics_progress_token.is_none() {
1834 self.disk_based_diagnostics_finished(cx);
1835 self.broadcast_language_server_update(
1836 server_id,
1837 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1838 proto::LspDiskBasedDiagnosticsUpdated {},
1839 ),
1840 );
1841 }
1842 }
1843
1844 fn on_lsp_progress(
1845 &mut self,
1846 progress: lsp::ProgressParams,
1847 server_id: usize,
1848 disk_based_diagnostics_progress_token: Option<&str>,
1849 cx: &mut ModelContext<Self>,
1850 ) {
1851 let token = match progress.token {
1852 lsp::NumberOrString::String(token) => token,
1853 lsp::NumberOrString::Number(token) => {
1854 log::info!("skipping numeric progress token {}", token);
1855 return;
1856 }
1857 };
1858 let progress = match progress.value {
1859 lsp::ProgressParamsValue::WorkDone(value) => value,
1860 };
1861 let language_server_status =
1862 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1863 status
1864 } else {
1865 return;
1866 };
1867 match progress {
1868 lsp::WorkDoneProgress::Begin(_) => {
1869 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1870 language_server_status.pending_diagnostic_updates += 1;
1871 if language_server_status.pending_diagnostic_updates == 1 {
1872 self.disk_based_diagnostics_started(cx);
1873 self.broadcast_language_server_update(
1874 server_id,
1875 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1876 proto::LspDiskBasedDiagnosticsUpdating {},
1877 ),
1878 );
1879 }
1880 } else {
1881 self.on_lsp_work_start(server_id, token.clone(), cx);
1882 self.broadcast_language_server_update(
1883 server_id,
1884 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1885 token,
1886 }),
1887 );
1888 }
1889 }
1890 lsp::WorkDoneProgress::Report(report) => {
1891 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1892 self.on_lsp_work_progress(
1893 server_id,
1894 token.clone(),
1895 LanguageServerProgress {
1896 message: report.message.clone(),
1897 percentage: report.percentage.map(|p| p as usize),
1898 last_update_at: Instant::now(),
1899 },
1900 cx,
1901 );
1902 self.broadcast_language_server_update(
1903 server_id,
1904 proto::update_language_server::Variant::WorkProgress(
1905 proto::LspWorkProgress {
1906 token,
1907 message: report.message,
1908 percentage: report.percentage.map(|p| p as u32),
1909 },
1910 ),
1911 );
1912 }
1913 }
1914 lsp::WorkDoneProgress::End(_) => {
1915 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1916 language_server_status.pending_diagnostic_updates -= 1;
1917 if language_server_status.pending_diagnostic_updates == 0 {
1918 self.disk_based_diagnostics_finished(cx);
1919 self.broadcast_language_server_update(
1920 server_id,
1921 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1922 proto::LspDiskBasedDiagnosticsUpdated {},
1923 ),
1924 );
1925 }
1926 } else {
1927 self.on_lsp_work_end(server_id, token.clone(), cx);
1928 self.broadcast_language_server_update(
1929 server_id,
1930 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1931 token,
1932 }),
1933 );
1934 }
1935 }
1936 }
1937 }
1938
1939 fn on_lsp_work_start(
1940 &mut self,
1941 language_server_id: usize,
1942 token: String,
1943 cx: &mut ModelContext<Self>,
1944 ) {
1945 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1946 status.pending_work.insert(
1947 token,
1948 LanguageServerProgress {
1949 message: None,
1950 percentage: None,
1951 last_update_at: Instant::now(),
1952 },
1953 );
1954 cx.notify();
1955 }
1956 }
1957
1958 fn on_lsp_work_progress(
1959 &mut self,
1960 language_server_id: usize,
1961 token: String,
1962 progress: LanguageServerProgress,
1963 cx: &mut ModelContext<Self>,
1964 ) {
1965 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1966 status.pending_work.insert(token, progress);
1967 cx.notify();
1968 }
1969 }
1970
1971 fn on_lsp_work_end(
1972 &mut self,
1973 language_server_id: usize,
1974 token: String,
1975 cx: &mut ModelContext<Self>,
1976 ) {
1977 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1978 status.pending_work.remove(&token);
1979 cx.notify();
1980 }
1981 }
1982
1983 async fn on_lsp_workspace_edit(
1984 this: WeakModelHandle<Self>,
1985 params: lsp::ApplyWorkspaceEditParams,
1986 server_id: usize,
1987 adapter: Arc<dyn LspAdapter>,
1988 language_server: Arc<LanguageServer>,
1989 mut cx: AsyncAppContext,
1990 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1991 let this = this
1992 .upgrade(&cx)
1993 .ok_or_else(|| anyhow!("project project closed"))?;
1994 let transaction = Self::deserialize_workspace_edit(
1995 this.clone(),
1996 params.edit,
1997 true,
1998 adapter.clone(),
1999 language_server.clone(),
2000 &mut cx,
2001 )
2002 .await
2003 .log_err();
2004 this.update(&mut cx, |this, _| {
2005 if let Some(transaction) = transaction {
2006 this.last_workspace_edits_by_language_server
2007 .insert(server_id, transaction);
2008 }
2009 });
2010 Ok(lsp::ApplyWorkspaceEditResponse {
2011 applied: true,
2012 failed_change: None,
2013 failure_reason: None,
2014 })
2015 }
2016
2017 fn broadcast_language_server_update(
2018 &self,
2019 language_server_id: usize,
2020 event: proto::update_language_server::Variant,
2021 ) {
2022 if let Some(project_id) = self.remote_id() {
2023 self.client
2024 .send(proto::UpdateLanguageServer {
2025 project_id,
2026 language_server_id: language_server_id as u64,
2027 variant: Some(event),
2028 })
2029 .log_err();
2030 }
2031 }
2032
2033 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2034 for (_, server) in self.language_servers.values() {
2035 server
2036 .notify::<lsp::notification::DidChangeConfiguration>(
2037 lsp::DidChangeConfigurationParams {
2038 settings: settings.clone(),
2039 },
2040 )
2041 .ok();
2042 }
2043 *self.language_server_settings.lock() = settings;
2044 }
2045
2046 pub fn language_server_statuses(
2047 &self,
2048 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2049 self.language_server_statuses.values()
2050 }
2051
2052 pub fn update_diagnostics(
2053 &mut self,
2054 params: lsp::PublishDiagnosticsParams,
2055 disk_based_sources: &[&str],
2056 cx: &mut ModelContext<Self>,
2057 ) -> Result<()> {
2058 let abs_path = params
2059 .uri
2060 .to_file_path()
2061 .map_err(|_| anyhow!("URI is not a file"))?;
2062 let mut next_group_id = 0;
2063 let mut diagnostics = Vec::default();
2064 let mut primary_diagnostic_group_ids = HashMap::default();
2065 let mut sources_by_group_id = HashMap::default();
2066 let mut supporting_diagnostics = HashMap::default();
2067 for diagnostic in ¶ms.diagnostics {
2068 let source = diagnostic.source.as_ref();
2069 let code = diagnostic.code.as_ref().map(|code| match code {
2070 lsp::NumberOrString::Number(code) => code.to_string(),
2071 lsp::NumberOrString::String(code) => code.clone(),
2072 });
2073 let range = range_from_lsp(diagnostic.range);
2074 let is_supporting = diagnostic
2075 .related_information
2076 .as_ref()
2077 .map_or(false, |infos| {
2078 infos.iter().any(|info| {
2079 primary_diagnostic_group_ids.contains_key(&(
2080 source,
2081 code.clone(),
2082 range_from_lsp(info.location.range),
2083 ))
2084 })
2085 });
2086
2087 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2088 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2089 });
2090
2091 if is_supporting {
2092 supporting_diagnostics.insert(
2093 (source, code.clone(), range),
2094 (diagnostic.severity, is_unnecessary),
2095 );
2096 } else {
2097 let group_id = post_inc(&mut next_group_id);
2098 let is_disk_based = source.map_or(false, |source| {
2099 disk_based_sources.contains(&source.as_str())
2100 });
2101
2102 sources_by_group_id.insert(group_id, source);
2103 primary_diagnostic_group_ids
2104 .insert((source, code.clone(), range.clone()), group_id);
2105
2106 diagnostics.push(DiagnosticEntry {
2107 range,
2108 diagnostic: Diagnostic {
2109 code: code.clone(),
2110 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2111 message: diagnostic.message.clone(),
2112 group_id,
2113 is_primary: true,
2114 is_valid: true,
2115 is_disk_based,
2116 is_unnecessary,
2117 },
2118 });
2119 if let Some(infos) = &diagnostic.related_information {
2120 for info in infos {
2121 if info.location.uri == params.uri && !info.message.is_empty() {
2122 let range = range_from_lsp(info.location.range);
2123 diagnostics.push(DiagnosticEntry {
2124 range,
2125 diagnostic: Diagnostic {
2126 code: code.clone(),
2127 severity: DiagnosticSeverity::INFORMATION,
2128 message: info.message.clone(),
2129 group_id,
2130 is_primary: false,
2131 is_valid: true,
2132 is_disk_based,
2133 is_unnecessary: false,
2134 },
2135 });
2136 }
2137 }
2138 }
2139 }
2140 }
2141
2142 for entry in &mut diagnostics {
2143 let diagnostic = &mut entry.diagnostic;
2144 if !diagnostic.is_primary {
2145 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2146 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2147 source,
2148 diagnostic.code.clone(),
2149 entry.range.clone(),
2150 )) {
2151 if let Some(severity) = severity {
2152 diagnostic.severity = severity;
2153 }
2154 diagnostic.is_unnecessary = is_unnecessary;
2155 }
2156 }
2157 }
2158
2159 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2160 Ok(())
2161 }
2162
2163 pub fn update_diagnostic_entries(
2164 &mut self,
2165 abs_path: PathBuf,
2166 version: Option<i32>,
2167 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2168 cx: &mut ModelContext<Project>,
2169 ) -> Result<(), anyhow::Error> {
2170 let (worktree, relative_path) = self
2171 .find_local_worktree(&abs_path, cx)
2172 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2173 if !worktree.read(cx).is_visible() {
2174 return Ok(());
2175 }
2176
2177 let project_path = ProjectPath {
2178 worktree_id: worktree.read(cx).id(),
2179 path: relative_path.into(),
2180 };
2181 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2182 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2183 }
2184
2185 let updated = worktree.update(cx, |worktree, cx| {
2186 worktree
2187 .as_local_mut()
2188 .ok_or_else(|| anyhow!("not a local worktree"))?
2189 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2190 })?;
2191 if updated {
2192 cx.emit(Event::DiagnosticsUpdated(project_path));
2193 }
2194 Ok(())
2195 }
2196
2197 fn update_buffer_diagnostics(
2198 &mut self,
2199 buffer: &ModelHandle<Buffer>,
2200 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2201 version: Option<i32>,
2202 cx: &mut ModelContext<Self>,
2203 ) -> Result<()> {
2204 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2205 Ordering::Equal
2206 .then_with(|| b.is_primary.cmp(&a.is_primary))
2207 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2208 .then_with(|| a.severity.cmp(&b.severity))
2209 .then_with(|| a.message.cmp(&b.message))
2210 }
2211
2212 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2213
2214 diagnostics.sort_unstable_by(|a, b| {
2215 Ordering::Equal
2216 .then_with(|| a.range.start.cmp(&b.range.start))
2217 .then_with(|| b.range.end.cmp(&a.range.end))
2218 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2219 });
2220
2221 let mut sanitized_diagnostics = Vec::new();
2222 let edits_since_save = Patch::new(
2223 snapshot
2224 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2225 .collect(),
2226 );
2227 for entry in diagnostics {
2228 let start;
2229 let end;
2230 if entry.diagnostic.is_disk_based {
2231 // Some diagnostics are based on files on disk instead of buffers'
2232 // current contents. Adjust these diagnostics' ranges to reflect
2233 // any unsaved edits.
2234 start = edits_since_save.old_to_new(entry.range.start);
2235 end = edits_since_save.old_to_new(entry.range.end);
2236 } else {
2237 start = entry.range.start;
2238 end = entry.range.end;
2239 }
2240
2241 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2242 ..snapshot.clip_point_utf16(end, Bias::Right);
2243
2244 // Expand empty ranges by one character
2245 if range.start == range.end {
2246 range.end.column += 1;
2247 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2248 if range.start == range.end && range.end.column > 0 {
2249 range.start.column -= 1;
2250 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2251 }
2252 }
2253
2254 sanitized_diagnostics.push(DiagnosticEntry {
2255 range,
2256 diagnostic: entry.diagnostic,
2257 });
2258 }
2259 drop(edits_since_save);
2260
2261 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2262 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2263 Ok(())
2264 }
2265
2266 pub fn reload_buffers(
2267 &self,
2268 buffers: HashSet<ModelHandle<Buffer>>,
2269 push_to_history: bool,
2270 cx: &mut ModelContext<Self>,
2271 ) -> Task<Result<ProjectTransaction>> {
2272 let mut local_buffers = Vec::new();
2273 let mut remote_buffers = None;
2274 for buffer_handle in buffers {
2275 let buffer = buffer_handle.read(cx);
2276 if buffer.is_dirty() {
2277 if let Some(file) = File::from_dyn(buffer.file()) {
2278 if file.is_local() {
2279 local_buffers.push(buffer_handle);
2280 } else {
2281 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2282 }
2283 }
2284 }
2285 }
2286
2287 let remote_buffers = self.remote_id().zip(remote_buffers);
2288 let client = self.client.clone();
2289
2290 cx.spawn(|this, mut cx| async move {
2291 let mut project_transaction = ProjectTransaction::default();
2292
2293 if let Some((project_id, remote_buffers)) = remote_buffers {
2294 let response = client
2295 .request(proto::ReloadBuffers {
2296 project_id,
2297 buffer_ids: remote_buffers
2298 .iter()
2299 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2300 .collect(),
2301 })
2302 .await?
2303 .transaction
2304 .ok_or_else(|| anyhow!("missing transaction"))?;
2305 project_transaction = this
2306 .update(&mut cx, |this, cx| {
2307 this.deserialize_project_transaction(response, push_to_history, cx)
2308 })
2309 .await?;
2310 }
2311
2312 for buffer in local_buffers {
2313 let transaction = buffer
2314 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2315 .await?;
2316 buffer.update(&mut cx, |buffer, cx| {
2317 if let Some(transaction) = transaction {
2318 if !push_to_history {
2319 buffer.forget_transaction(transaction.id);
2320 }
2321 project_transaction.0.insert(cx.handle(), transaction);
2322 }
2323 });
2324 }
2325
2326 Ok(project_transaction)
2327 })
2328 }
2329
2330 pub fn format(
2331 &self,
2332 buffers: HashSet<ModelHandle<Buffer>>,
2333 push_to_history: bool,
2334 cx: &mut ModelContext<Project>,
2335 ) -> Task<Result<ProjectTransaction>> {
2336 let mut local_buffers = Vec::new();
2337 let mut remote_buffers = None;
2338 for buffer_handle in buffers {
2339 let buffer = buffer_handle.read(cx);
2340 if let Some(file) = File::from_dyn(buffer.file()) {
2341 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2342 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2343 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2344 }
2345 } else {
2346 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2347 }
2348 } else {
2349 return Task::ready(Ok(Default::default()));
2350 }
2351 }
2352
2353 let remote_buffers = self.remote_id().zip(remote_buffers);
2354 let client = self.client.clone();
2355
2356 cx.spawn(|this, mut cx| async move {
2357 let mut project_transaction = ProjectTransaction::default();
2358
2359 if let Some((project_id, remote_buffers)) = remote_buffers {
2360 let response = client
2361 .request(proto::FormatBuffers {
2362 project_id,
2363 buffer_ids: remote_buffers
2364 .iter()
2365 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2366 .collect(),
2367 })
2368 .await?
2369 .transaction
2370 .ok_or_else(|| anyhow!("missing transaction"))?;
2371 project_transaction = this
2372 .update(&mut cx, |this, cx| {
2373 this.deserialize_project_transaction(response, push_to_history, cx)
2374 })
2375 .await?;
2376 }
2377
2378 for (buffer, buffer_abs_path, language_server) in local_buffers {
2379 let text_document = lsp::TextDocumentIdentifier::new(
2380 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2381 );
2382 let capabilities = &language_server.capabilities();
2383 let tab_size = cx.update(|cx| {
2384 let language_name = buffer.read(cx).language().map(|language| language.name());
2385 cx.global::<Settings>().tab_size(language_name.as_deref())
2386 });
2387 let lsp_edits = if capabilities
2388 .document_formatting_provider
2389 .as_ref()
2390 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2391 {
2392 language_server
2393 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2394 text_document,
2395 options: lsp::FormattingOptions {
2396 tab_size,
2397 insert_spaces: true,
2398 insert_final_newline: Some(true),
2399 ..Default::default()
2400 },
2401 work_done_progress_params: Default::default(),
2402 })
2403 .await?
2404 } else if capabilities
2405 .document_range_formatting_provider
2406 .as_ref()
2407 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2408 {
2409 let buffer_start = lsp::Position::new(0, 0);
2410 let buffer_end =
2411 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2412 language_server
2413 .request::<lsp::request::RangeFormatting>(
2414 lsp::DocumentRangeFormattingParams {
2415 text_document,
2416 range: lsp::Range::new(buffer_start, buffer_end),
2417 options: lsp::FormattingOptions {
2418 tab_size: 4,
2419 insert_spaces: true,
2420 insert_final_newline: Some(true),
2421 ..Default::default()
2422 },
2423 work_done_progress_params: Default::default(),
2424 },
2425 )
2426 .await?
2427 } else {
2428 continue;
2429 };
2430
2431 if let Some(lsp_edits) = lsp_edits {
2432 let edits = this
2433 .update(&mut cx, |this, cx| {
2434 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2435 })
2436 .await?;
2437 buffer.update(&mut cx, |buffer, cx| {
2438 buffer.finalize_last_transaction();
2439 buffer.start_transaction();
2440 for (range, text) in edits {
2441 buffer.edit([(range, text)], cx);
2442 }
2443 if buffer.end_transaction(cx).is_some() {
2444 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2445 if !push_to_history {
2446 buffer.forget_transaction(transaction.id);
2447 }
2448 project_transaction.0.insert(cx.handle(), transaction);
2449 }
2450 });
2451 }
2452 }
2453
2454 Ok(project_transaction)
2455 })
2456 }
2457
2458 pub fn definition<T: ToPointUtf16>(
2459 &self,
2460 buffer: &ModelHandle<Buffer>,
2461 position: T,
2462 cx: &mut ModelContext<Self>,
2463 ) -> Task<Result<Vec<Location>>> {
2464 let position = position.to_point_utf16(buffer.read(cx));
2465 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2466 }
2467
2468 pub fn references<T: ToPointUtf16>(
2469 &self,
2470 buffer: &ModelHandle<Buffer>,
2471 position: T,
2472 cx: &mut ModelContext<Self>,
2473 ) -> Task<Result<Vec<Location>>> {
2474 let position = position.to_point_utf16(buffer.read(cx));
2475 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2476 }
2477
2478 pub fn document_highlights<T: ToPointUtf16>(
2479 &self,
2480 buffer: &ModelHandle<Buffer>,
2481 position: T,
2482 cx: &mut ModelContext<Self>,
2483 ) -> Task<Result<Vec<DocumentHighlight>>> {
2484 let position = position.to_point_utf16(buffer.read(cx));
2485
2486 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2487 }
2488
2489 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2490 if self.is_local() {
2491 let mut requests = Vec::new();
2492 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2493 let worktree_id = *worktree_id;
2494 if let Some(worktree) = self
2495 .worktree_for_id(worktree_id, cx)
2496 .and_then(|worktree| worktree.read(cx).as_local())
2497 {
2498 let lsp_adapter = lsp_adapter.clone();
2499 let worktree_abs_path = worktree.abs_path().clone();
2500 requests.push(
2501 language_server
2502 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2503 query: query.to_string(),
2504 ..Default::default()
2505 })
2506 .log_err()
2507 .map(move |response| {
2508 (
2509 lsp_adapter,
2510 worktree_id,
2511 worktree_abs_path,
2512 response.unwrap_or_default(),
2513 )
2514 }),
2515 );
2516 }
2517 }
2518
2519 cx.spawn_weak(|this, cx| async move {
2520 let responses = futures::future::join_all(requests).await;
2521 let this = if let Some(this) = this.upgrade(&cx) {
2522 this
2523 } else {
2524 return Ok(Default::default());
2525 };
2526 this.read_with(&cx, |this, cx| {
2527 let mut symbols = Vec::new();
2528 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2529 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2530 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2531 let mut worktree_id = source_worktree_id;
2532 let path;
2533 if let Some((worktree, rel_path)) =
2534 this.find_local_worktree(&abs_path, cx)
2535 {
2536 worktree_id = worktree.read(cx).id();
2537 path = rel_path;
2538 } else {
2539 path = relativize_path(&worktree_abs_path, &abs_path);
2540 }
2541
2542 let label = this
2543 .languages
2544 .select_language(&path)
2545 .and_then(|language| {
2546 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2547 })
2548 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2549 let signature = this.symbol_signature(worktree_id, &path);
2550
2551 Some(Symbol {
2552 source_worktree_id,
2553 worktree_id,
2554 language_server_name: adapter.name(),
2555 name: lsp_symbol.name,
2556 kind: lsp_symbol.kind,
2557 label,
2558 path,
2559 range: range_from_lsp(lsp_symbol.location.range),
2560 signature,
2561 })
2562 }));
2563 }
2564 Ok(symbols)
2565 })
2566 })
2567 } else if let Some(project_id) = self.remote_id() {
2568 let request = self.client.request(proto::GetProjectSymbols {
2569 project_id,
2570 query: query.to_string(),
2571 });
2572 cx.spawn_weak(|this, cx| async move {
2573 let response = request.await?;
2574 let mut symbols = Vec::new();
2575 if let Some(this) = this.upgrade(&cx) {
2576 this.read_with(&cx, |this, _| {
2577 symbols.extend(
2578 response
2579 .symbols
2580 .into_iter()
2581 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2582 );
2583 })
2584 }
2585 Ok(symbols)
2586 })
2587 } else {
2588 Task::ready(Ok(Default::default()))
2589 }
2590 }
2591
2592 pub fn open_buffer_for_symbol(
2593 &mut self,
2594 symbol: &Symbol,
2595 cx: &mut ModelContext<Self>,
2596 ) -> Task<Result<ModelHandle<Buffer>>> {
2597 if self.is_local() {
2598 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2599 symbol.source_worktree_id,
2600 symbol.language_server_name.clone(),
2601 )) {
2602 server.clone()
2603 } else {
2604 return Task::ready(Err(anyhow!(
2605 "language server for worktree and language not found"
2606 )));
2607 };
2608
2609 let worktree_abs_path = if let Some(worktree_abs_path) = self
2610 .worktree_for_id(symbol.worktree_id, cx)
2611 .and_then(|worktree| worktree.read(cx).as_local())
2612 .map(|local_worktree| local_worktree.abs_path())
2613 {
2614 worktree_abs_path
2615 } else {
2616 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2617 };
2618 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2619 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2620 uri
2621 } else {
2622 return Task::ready(Err(anyhow!("invalid symbol path")));
2623 };
2624
2625 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2626 } else if let Some(project_id) = self.remote_id() {
2627 let request = self.client.request(proto::OpenBufferForSymbol {
2628 project_id,
2629 symbol: Some(serialize_symbol(symbol)),
2630 });
2631 cx.spawn(|this, mut cx| async move {
2632 let response = request.await?;
2633 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2634 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2635 .await
2636 })
2637 } else {
2638 Task::ready(Err(anyhow!("project does not have a remote id")))
2639 }
2640 }
2641
2642 pub fn completions<T: ToPointUtf16>(
2643 &self,
2644 source_buffer_handle: &ModelHandle<Buffer>,
2645 position: T,
2646 cx: &mut ModelContext<Self>,
2647 ) -> Task<Result<Vec<Completion>>> {
2648 let source_buffer_handle = source_buffer_handle.clone();
2649 let source_buffer = source_buffer_handle.read(cx);
2650 let buffer_id = source_buffer.remote_id();
2651 let language = source_buffer.language().cloned();
2652 let worktree;
2653 let buffer_abs_path;
2654 if let Some(file) = File::from_dyn(source_buffer.file()) {
2655 worktree = file.worktree.clone();
2656 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2657 } else {
2658 return Task::ready(Ok(Default::default()));
2659 };
2660
2661 let position = position.to_point_utf16(source_buffer);
2662 let anchor = source_buffer.anchor_after(position);
2663
2664 if worktree.read(cx).as_local().is_some() {
2665 let buffer_abs_path = buffer_abs_path.unwrap();
2666 let (_, lang_server) =
2667 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2668 server.clone()
2669 } else {
2670 return Task::ready(Ok(Default::default()));
2671 };
2672
2673 cx.spawn(|_, cx| async move {
2674 let completions = lang_server
2675 .request::<lsp::request::Completion>(lsp::CompletionParams {
2676 text_document_position: lsp::TextDocumentPositionParams::new(
2677 lsp::TextDocumentIdentifier::new(
2678 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2679 ),
2680 point_to_lsp(position),
2681 ),
2682 context: Default::default(),
2683 work_done_progress_params: Default::default(),
2684 partial_result_params: Default::default(),
2685 })
2686 .await
2687 .context("lsp completion request failed")?;
2688
2689 let completions = if let Some(completions) = completions {
2690 match completions {
2691 lsp::CompletionResponse::Array(completions) => completions,
2692 lsp::CompletionResponse::List(list) => list.items,
2693 }
2694 } else {
2695 Default::default()
2696 };
2697
2698 source_buffer_handle.read_with(&cx, |this, _| {
2699 let snapshot = this.snapshot();
2700 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2701 let mut range_for_token = None;
2702 Ok(completions
2703 .into_iter()
2704 .filter_map(|lsp_completion| {
2705 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2706 // If the language server provides a range to overwrite, then
2707 // check that the range is valid.
2708 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2709 let range = range_from_lsp(edit.range);
2710 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2711 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2712 if start != range.start || end != range.end {
2713 log::info!("completion out of expected range");
2714 return None;
2715 }
2716 (
2717 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2718 edit.new_text.clone(),
2719 )
2720 }
2721 // If the language server does not provide a range, then infer
2722 // the range based on the syntax tree.
2723 None => {
2724 if position != clipped_position {
2725 log::info!("completion out of expected range");
2726 return None;
2727 }
2728 let Range { start, end } = range_for_token
2729 .get_or_insert_with(|| {
2730 let offset = position.to_offset(&snapshot);
2731 snapshot
2732 .range_for_word_token_at(offset)
2733 .unwrap_or_else(|| offset..offset)
2734 })
2735 .clone();
2736 let text = lsp_completion
2737 .insert_text
2738 .as_ref()
2739 .unwrap_or(&lsp_completion.label)
2740 .clone();
2741 (
2742 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2743 text.clone(),
2744 )
2745 }
2746 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2747 log::info!("unsupported insert/replace completion");
2748 return None;
2749 }
2750 };
2751
2752 Some(Completion {
2753 old_range,
2754 new_text,
2755 label: language
2756 .as_ref()
2757 .and_then(|l| l.label_for_completion(&lsp_completion))
2758 .unwrap_or_else(|| {
2759 CodeLabel::plain(
2760 lsp_completion.label.clone(),
2761 lsp_completion.filter_text.as_deref(),
2762 )
2763 }),
2764 lsp_completion,
2765 })
2766 })
2767 .collect())
2768 })
2769 })
2770 } else if let Some(project_id) = self.remote_id() {
2771 let rpc = self.client.clone();
2772 let message = proto::GetCompletions {
2773 project_id,
2774 buffer_id,
2775 position: Some(language::proto::serialize_anchor(&anchor)),
2776 version: serialize_version(&source_buffer.version()),
2777 };
2778 cx.spawn_weak(|_, mut cx| async move {
2779 let response = rpc.request(message).await?;
2780
2781 source_buffer_handle
2782 .update(&mut cx, |buffer, _| {
2783 buffer.wait_for_version(deserialize_version(response.version))
2784 })
2785 .await;
2786
2787 response
2788 .completions
2789 .into_iter()
2790 .map(|completion| {
2791 language::proto::deserialize_completion(completion, language.as_ref())
2792 })
2793 .collect()
2794 })
2795 } else {
2796 Task::ready(Ok(Default::default()))
2797 }
2798 }
2799
2800 pub fn apply_additional_edits_for_completion(
2801 &self,
2802 buffer_handle: ModelHandle<Buffer>,
2803 completion: Completion,
2804 push_to_history: bool,
2805 cx: &mut ModelContext<Self>,
2806 ) -> Task<Result<Option<Transaction>>> {
2807 let buffer = buffer_handle.read(cx);
2808 let buffer_id = buffer.remote_id();
2809
2810 if self.is_local() {
2811 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2812 {
2813 server.clone()
2814 } else {
2815 return Task::ready(Ok(Default::default()));
2816 };
2817
2818 cx.spawn(|this, mut cx| async move {
2819 let resolved_completion = lang_server
2820 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2821 .await?;
2822 if let Some(edits) = resolved_completion.additional_text_edits {
2823 let edits = this
2824 .update(&mut cx, |this, cx| {
2825 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2826 })
2827 .await?;
2828 buffer_handle.update(&mut cx, |buffer, cx| {
2829 buffer.finalize_last_transaction();
2830 buffer.start_transaction();
2831 for (range, text) in edits {
2832 buffer.edit([(range, text)], cx);
2833 }
2834 let transaction = if buffer.end_transaction(cx).is_some() {
2835 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2836 if !push_to_history {
2837 buffer.forget_transaction(transaction.id);
2838 }
2839 Some(transaction)
2840 } else {
2841 None
2842 };
2843 Ok(transaction)
2844 })
2845 } else {
2846 Ok(None)
2847 }
2848 })
2849 } else if let Some(project_id) = self.remote_id() {
2850 let client = self.client.clone();
2851 cx.spawn(|_, mut cx| async move {
2852 let response = client
2853 .request(proto::ApplyCompletionAdditionalEdits {
2854 project_id,
2855 buffer_id,
2856 completion: Some(language::proto::serialize_completion(&completion)),
2857 })
2858 .await?;
2859
2860 if let Some(transaction) = response.transaction {
2861 let transaction = language::proto::deserialize_transaction(transaction)?;
2862 buffer_handle
2863 .update(&mut cx, |buffer, _| {
2864 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2865 })
2866 .await;
2867 if push_to_history {
2868 buffer_handle.update(&mut cx, |buffer, _| {
2869 buffer.push_transaction(transaction.clone(), Instant::now());
2870 });
2871 }
2872 Ok(Some(transaction))
2873 } else {
2874 Ok(None)
2875 }
2876 })
2877 } else {
2878 Task::ready(Err(anyhow!("project does not have a remote id")))
2879 }
2880 }
2881
2882 pub fn code_actions<T: Clone + ToOffset>(
2883 &self,
2884 buffer_handle: &ModelHandle<Buffer>,
2885 range: Range<T>,
2886 cx: &mut ModelContext<Self>,
2887 ) -> Task<Result<Vec<CodeAction>>> {
2888 let buffer_handle = buffer_handle.clone();
2889 let buffer = buffer_handle.read(cx);
2890 let snapshot = buffer.snapshot();
2891 let relevant_diagnostics = snapshot
2892 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2893 .map(|entry| entry.to_lsp_diagnostic_stub())
2894 .collect();
2895 let buffer_id = buffer.remote_id();
2896 let worktree;
2897 let buffer_abs_path;
2898 if let Some(file) = File::from_dyn(buffer.file()) {
2899 worktree = file.worktree.clone();
2900 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2901 } else {
2902 return Task::ready(Ok(Default::default()));
2903 };
2904 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2905
2906 if worktree.read(cx).as_local().is_some() {
2907 let buffer_abs_path = buffer_abs_path.unwrap();
2908 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2909 {
2910 server.clone()
2911 } else {
2912 return Task::ready(Ok(Default::default()));
2913 };
2914
2915 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2916 cx.foreground().spawn(async move {
2917 if !lang_server.capabilities().code_action_provider.is_some() {
2918 return Ok(Default::default());
2919 }
2920
2921 Ok(lang_server
2922 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2923 text_document: lsp::TextDocumentIdentifier::new(
2924 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2925 ),
2926 range: lsp_range,
2927 work_done_progress_params: Default::default(),
2928 partial_result_params: Default::default(),
2929 context: lsp::CodeActionContext {
2930 diagnostics: relevant_diagnostics,
2931 only: Some(vec![
2932 lsp::CodeActionKind::QUICKFIX,
2933 lsp::CodeActionKind::REFACTOR,
2934 lsp::CodeActionKind::REFACTOR_EXTRACT,
2935 lsp::CodeActionKind::SOURCE,
2936 ]),
2937 },
2938 })
2939 .await?
2940 .unwrap_or_default()
2941 .into_iter()
2942 .filter_map(|entry| {
2943 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2944 Some(CodeAction {
2945 range: range.clone(),
2946 lsp_action,
2947 })
2948 } else {
2949 None
2950 }
2951 })
2952 .collect())
2953 })
2954 } else if let Some(project_id) = self.remote_id() {
2955 let rpc = self.client.clone();
2956 let version = buffer.version();
2957 cx.spawn_weak(|_, mut cx| async move {
2958 let response = rpc
2959 .request(proto::GetCodeActions {
2960 project_id,
2961 buffer_id,
2962 start: Some(language::proto::serialize_anchor(&range.start)),
2963 end: Some(language::proto::serialize_anchor(&range.end)),
2964 version: serialize_version(&version),
2965 })
2966 .await?;
2967
2968 buffer_handle
2969 .update(&mut cx, |buffer, _| {
2970 buffer.wait_for_version(deserialize_version(response.version))
2971 })
2972 .await;
2973
2974 response
2975 .actions
2976 .into_iter()
2977 .map(language::proto::deserialize_code_action)
2978 .collect()
2979 })
2980 } else {
2981 Task::ready(Ok(Default::default()))
2982 }
2983 }
2984
2985 pub fn apply_code_action(
2986 &self,
2987 buffer_handle: ModelHandle<Buffer>,
2988 mut action: CodeAction,
2989 push_to_history: bool,
2990 cx: &mut ModelContext<Self>,
2991 ) -> Task<Result<ProjectTransaction>> {
2992 if self.is_local() {
2993 let buffer = buffer_handle.read(cx);
2994 let (lsp_adapter, lang_server) =
2995 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2996 server.clone()
2997 } else {
2998 return Task::ready(Ok(Default::default()));
2999 };
3000 let range = action.range.to_point_utf16(buffer);
3001
3002 cx.spawn(|this, mut cx| async move {
3003 if let Some(lsp_range) = action
3004 .lsp_action
3005 .data
3006 .as_mut()
3007 .and_then(|d| d.get_mut("codeActionParams"))
3008 .and_then(|d| d.get_mut("range"))
3009 {
3010 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3011 action.lsp_action = lang_server
3012 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3013 .await?;
3014 } else {
3015 let actions = this
3016 .update(&mut cx, |this, cx| {
3017 this.code_actions(&buffer_handle, action.range, cx)
3018 })
3019 .await?;
3020 action.lsp_action = actions
3021 .into_iter()
3022 .find(|a| a.lsp_action.title == action.lsp_action.title)
3023 .ok_or_else(|| anyhow!("code action is outdated"))?
3024 .lsp_action;
3025 }
3026
3027 if let Some(edit) = action.lsp_action.edit {
3028 Self::deserialize_workspace_edit(
3029 this,
3030 edit,
3031 push_to_history,
3032 lsp_adapter,
3033 lang_server,
3034 &mut cx,
3035 )
3036 .await
3037 } else if let Some(command) = action.lsp_action.command {
3038 this.update(&mut cx, |this, _| {
3039 this.last_workspace_edits_by_language_server
3040 .remove(&lang_server.server_id());
3041 });
3042 lang_server
3043 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3044 command: command.command,
3045 arguments: command.arguments.unwrap_or_default(),
3046 ..Default::default()
3047 })
3048 .await?;
3049 Ok(this.update(&mut cx, |this, _| {
3050 this.last_workspace_edits_by_language_server
3051 .remove(&lang_server.server_id())
3052 .unwrap_or_default()
3053 }))
3054 } else {
3055 Ok(ProjectTransaction::default())
3056 }
3057 })
3058 } else if let Some(project_id) = self.remote_id() {
3059 let client = self.client.clone();
3060 let request = proto::ApplyCodeAction {
3061 project_id,
3062 buffer_id: buffer_handle.read(cx).remote_id(),
3063 action: Some(language::proto::serialize_code_action(&action)),
3064 };
3065 cx.spawn(|this, mut cx| async move {
3066 let response = client
3067 .request(request)
3068 .await?
3069 .transaction
3070 .ok_or_else(|| anyhow!("missing transaction"))?;
3071 this.update(&mut cx, |this, cx| {
3072 this.deserialize_project_transaction(response, push_to_history, cx)
3073 })
3074 .await
3075 })
3076 } else {
3077 Task::ready(Err(anyhow!("project does not have a remote id")))
3078 }
3079 }
3080
3081 async fn deserialize_workspace_edit(
3082 this: ModelHandle<Self>,
3083 edit: lsp::WorkspaceEdit,
3084 push_to_history: bool,
3085 lsp_adapter: Arc<dyn LspAdapter>,
3086 language_server: Arc<LanguageServer>,
3087 cx: &mut AsyncAppContext,
3088 ) -> Result<ProjectTransaction> {
3089 let fs = this.read_with(cx, |this, _| this.fs.clone());
3090 let mut operations = Vec::new();
3091 if let Some(document_changes) = edit.document_changes {
3092 match document_changes {
3093 lsp::DocumentChanges::Edits(edits) => {
3094 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3095 }
3096 lsp::DocumentChanges::Operations(ops) => operations = ops,
3097 }
3098 } else if let Some(changes) = edit.changes {
3099 operations.extend(changes.into_iter().map(|(uri, edits)| {
3100 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3101 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3102 uri,
3103 version: None,
3104 },
3105 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3106 })
3107 }));
3108 }
3109
3110 let mut project_transaction = ProjectTransaction::default();
3111 for operation in operations {
3112 match operation {
3113 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3114 let abs_path = op
3115 .uri
3116 .to_file_path()
3117 .map_err(|_| anyhow!("can't convert URI to path"))?;
3118
3119 if let Some(parent_path) = abs_path.parent() {
3120 fs.create_dir(parent_path).await?;
3121 }
3122 if abs_path.ends_with("/") {
3123 fs.create_dir(&abs_path).await?;
3124 } else {
3125 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3126 .await?;
3127 }
3128 }
3129 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3130 let source_abs_path = op
3131 .old_uri
3132 .to_file_path()
3133 .map_err(|_| anyhow!("can't convert URI to path"))?;
3134 let target_abs_path = op
3135 .new_uri
3136 .to_file_path()
3137 .map_err(|_| anyhow!("can't convert URI to path"))?;
3138 fs.rename(
3139 &source_abs_path,
3140 &target_abs_path,
3141 op.options.map(Into::into).unwrap_or_default(),
3142 )
3143 .await?;
3144 }
3145 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3146 let abs_path = op
3147 .uri
3148 .to_file_path()
3149 .map_err(|_| anyhow!("can't convert URI to path"))?;
3150 let options = op.options.map(Into::into).unwrap_or_default();
3151 if abs_path.ends_with("/") {
3152 fs.remove_dir(&abs_path, options).await?;
3153 } else {
3154 fs.remove_file(&abs_path, options).await?;
3155 }
3156 }
3157 lsp::DocumentChangeOperation::Edit(op) => {
3158 let buffer_to_edit = this
3159 .update(cx, |this, cx| {
3160 this.open_local_buffer_via_lsp(
3161 op.text_document.uri,
3162 lsp_adapter.clone(),
3163 language_server.clone(),
3164 cx,
3165 )
3166 })
3167 .await?;
3168
3169 let edits = this
3170 .update(cx, |this, cx| {
3171 let edits = op.edits.into_iter().map(|edit| match edit {
3172 lsp::OneOf::Left(edit) => edit,
3173 lsp::OneOf::Right(edit) => edit.text_edit,
3174 });
3175 this.edits_from_lsp(
3176 &buffer_to_edit,
3177 edits,
3178 op.text_document.version,
3179 cx,
3180 )
3181 })
3182 .await?;
3183
3184 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3185 buffer.finalize_last_transaction();
3186 buffer.start_transaction();
3187 for (range, text) in edits {
3188 buffer.edit([(range, text)], cx);
3189 }
3190 let transaction = if buffer.end_transaction(cx).is_some() {
3191 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3192 if !push_to_history {
3193 buffer.forget_transaction(transaction.id);
3194 }
3195 Some(transaction)
3196 } else {
3197 None
3198 };
3199
3200 transaction
3201 });
3202 if let Some(transaction) = transaction {
3203 project_transaction.0.insert(buffer_to_edit, transaction);
3204 }
3205 }
3206 }
3207 }
3208
3209 Ok(project_transaction)
3210 }
3211
3212 pub fn prepare_rename<T: ToPointUtf16>(
3213 &self,
3214 buffer: ModelHandle<Buffer>,
3215 position: T,
3216 cx: &mut ModelContext<Self>,
3217 ) -> Task<Result<Option<Range<Anchor>>>> {
3218 let position = position.to_point_utf16(buffer.read(cx));
3219 self.request_lsp(buffer, PrepareRename { position }, cx)
3220 }
3221
3222 pub fn perform_rename<T: ToPointUtf16>(
3223 &self,
3224 buffer: ModelHandle<Buffer>,
3225 position: T,
3226 new_name: String,
3227 push_to_history: bool,
3228 cx: &mut ModelContext<Self>,
3229 ) -> Task<Result<ProjectTransaction>> {
3230 let position = position.to_point_utf16(buffer.read(cx));
3231 self.request_lsp(
3232 buffer,
3233 PerformRename {
3234 position,
3235 new_name,
3236 push_to_history,
3237 },
3238 cx,
3239 )
3240 }
3241
3242 pub fn search(
3243 &self,
3244 query: SearchQuery,
3245 cx: &mut ModelContext<Self>,
3246 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3247 if self.is_local() {
3248 let snapshots = self
3249 .visible_worktrees(cx)
3250 .filter_map(|tree| {
3251 let tree = tree.read(cx).as_local()?;
3252 Some(tree.snapshot())
3253 })
3254 .collect::<Vec<_>>();
3255
3256 let background = cx.background().clone();
3257 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3258 if path_count == 0 {
3259 return Task::ready(Ok(Default::default()));
3260 }
3261 let workers = background.num_cpus().min(path_count);
3262 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3263 cx.background()
3264 .spawn({
3265 let fs = self.fs.clone();
3266 let background = cx.background().clone();
3267 let query = query.clone();
3268 async move {
3269 let fs = &fs;
3270 let query = &query;
3271 let matching_paths_tx = &matching_paths_tx;
3272 let paths_per_worker = (path_count + workers - 1) / workers;
3273 let snapshots = &snapshots;
3274 background
3275 .scoped(|scope| {
3276 for worker_ix in 0..workers {
3277 let worker_start_ix = worker_ix * paths_per_worker;
3278 let worker_end_ix = worker_start_ix + paths_per_worker;
3279 scope.spawn(async move {
3280 let mut snapshot_start_ix = 0;
3281 let mut abs_path = PathBuf::new();
3282 for snapshot in snapshots {
3283 let snapshot_end_ix =
3284 snapshot_start_ix + snapshot.visible_file_count();
3285 if worker_end_ix <= snapshot_start_ix {
3286 break;
3287 } else if worker_start_ix > snapshot_end_ix {
3288 snapshot_start_ix = snapshot_end_ix;
3289 continue;
3290 } else {
3291 let start_in_snapshot = worker_start_ix
3292 .saturating_sub(snapshot_start_ix);
3293 let end_in_snapshot =
3294 cmp::min(worker_end_ix, snapshot_end_ix)
3295 - snapshot_start_ix;
3296
3297 for entry in snapshot
3298 .files(false, start_in_snapshot)
3299 .take(end_in_snapshot - start_in_snapshot)
3300 {
3301 if matching_paths_tx.is_closed() {
3302 break;
3303 }
3304
3305 abs_path.clear();
3306 abs_path.push(&snapshot.abs_path());
3307 abs_path.push(&entry.path);
3308 let matches = if let Some(file) =
3309 fs.open_sync(&abs_path).await.log_err()
3310 {
3311 query.detect(file).unwrap_or(false)
3312 } else {
3313 false
3314 };
3315
3316 if matches {
3317 let project_path =
3318 (snapshot.id(), entry.path.clone());
3319 if matching_paths_tx
3320 .send(project_path)
3321 .await
3322 .is_err()
3323 {
3324 break;
3325 }
3326 }
3327 }
3328
3329 snapshot_start_ix = snapshot_end_ix;
3330 }
3331 }
3332 });
3333 }
3334 })
3335 .await;
3336 }
3337 })
3338 .detach();
3339
3340 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3341 let open_buffers = self
3342 .opened_buffers
3343 .values()
3344 .filter_map(|b| b.upgrade(cx))
3345 .collect::<HashSet<_>>();
3346 cx.spawn(|this, cx| async move {
3347 for buffer in &open_buffers {
3348 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3349 buffers_tx.send((buffer.clone(), snapshot)).await?;
3350 }
3351
3352 let open_buffers = Rc::new(RefCell::new(open_buffers));
3353 while let Some(project_path) = matching_paths_rx.next().await {
3354 if buffers_tx.is_closed() {
3355 break;
3356 }
3357
3358 let this = this.clone();
3359 let open_buffers = open_buffers.clone();
3360 let buffers_tx = buffers_tx.clone();
3361 cx.spawn(|mut cx| async move {
3362 if let Some(buffer) = this
3363 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3364 .await
3365 .log_err()
3366 {
3367 if open_buffers.borrow_mut().insert(buffer.clone()) {
3368 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3369 buffers_tx.send((buffer, snapshot)).await?;
3370 }
3371 }
3372
3373 Ok::<_, anyhow::Error>(())
3374 })
3375 .detach();
3376 }
3377
3378 Ok::<_, anyhow::Error>(())
3379 })
3380 .detach_and_log_err(cx);
3381
3382 let background = cx.background().clone();
3383 cx.background().spawn(async move {
3384 let query = &query;
3385 let mut matched_buffers = Vec::new();
3386 for _ in 0..workers {
3387 matched_buffers.push(HashMap::default());
3388 }
3389 background
3390 .scoped(|scope| {
3391 for worker_matched_buffers in matched_buffers.iter_mut() {
3392 let mut buffers_rx = buffers_rx.clone();
3393 scope.spawn(async move {
3394 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3395 let buffer_matches = query
3396 .search(snapshot.as_rope())
3397 .await
3398 .iter()
3399 .map(|range| {
3400 snapshot.anchor_before(range.start)
3401 ..snapshot.anchor_after(range.end)
3402 })
3403 .collect::<Vec<_>>();
3404 if !buffer_matches.is_empty() {
3405 worker_matched_buffers
3406 .insert(buffer.clone(), buffer_matches);
3407 }
3408 }
3409 });
3410 }
3411 })
3412 .await;
3413 Ok(matched_buffers.into_iter().flatten().collect())
3414 })
3415 } else if let Some(project_id) = self.remote_id() {
3416 let request = self.client.request(query.to_proto(project_id));
3417 cx.spawn(|this, mut cx| async move {
3418 let response = request.await?;
3419 let mut result = HashMap::default();
3420 for location in response.locations {
3421 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3422 let target_buffer = this
3423 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3424 .await?;
3425 let start = location
3426 .start
3427 .and_then(deserialize_anchor)
3428 .ok_or_else(|| anyhow!("missing target start"))?;
3429 let end = location
3430 .end
3431 .and_then(deserialize_anchor)
3432 .ok_or_else(|| anyhow!("missing target end"))?;
3433 result
3434 .entry(target_buffer)
3435 .or_insert(Vec::new())
3436 .push(start..end)
3437 }
3438 Ok(result)
3439 })
3440 } else {
3441 Task::ready(Ok(Default::default()))
3442 }
3443 }
3444
3445 fn request_lsp<R: LspCommand>(
3446 &self,
3447 buffer_handle: ModelHandle<Buffer>,
3448 request: R,
3449 cx: &mut ModelContext<Self>,
3450 ) -> Task<Result<R::Response>>
3451 where
3452 <R::LspRequest as lsp::request::Request>::Result: Send,
3453 {
3454 let buffer = buffer_handle.read(cx);
3455 if self.is_local() {
3456 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3457 if let Some((file, (_, language_server))) =
3458 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3459 {
3460 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3461 return cx.spawn(|this, cx| async move {
3462 if !request.check_capabilities(&language_server.capabilities()) {
3463 return Ok(Default::default());
3464 }
3465
3466 let response = language_server
3467 .request::<R::LspRequest>(lsp_params)
3468 .await
3469 .context("lsp request failed")?;
3470 request
3471 .response_from_lsp(response, this, buffer_handle, cx)
3472 .await
3473 });
3474 }
3475 } else if let Some(project_id) = self.remote_id() {
3476 let rpc = self.client.clone();
3477 let message = request.to_proto(project_id, buffer);
3478 return cx.spawn(|this, cx| async move {
3479 let response = rpc.request(message).await?;
3480 request
3481 .response_from_proto(response, this, buffer_handle, cx)
3482 .await
3483 });
3484 }
3485 Task::ready(Ok(Default::default()))
3486 }
3487
3488 pub fn find_or_create_local_worktree(
3489 &mut self,
3490 abs_path: impl AsRef<Path>,
3491 visible: bool,
3492 cx: &mut ModelContext<Self>,
3493 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3494 let abs_path = abs_path.as_ref();
3495 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3496 Task::ready(Ok((tree.clone(), relative_path.into())))
3497 } else {
3498 let worktree = self.create_local_worktree(abs_path, visible, cx);
3499 cx.foreground()
3500 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3501 }
3502 }
3503
3504 pub fn find_local_worktree(
3505 &self,
3506 abs_path: &Path,
3507 cx: &AppContext,
3508 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3509 for tree in self.worktrees(cx) {
3510 if let Some(relative_path) = tree
3511 .read(cx)
3512 .as_local()
3513 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3514 {
3515 return Some((tree.clone(), relative_path.into()));
3516 }
3517 }
3518 None
3519 }
3520
3521 pub fn is_shared(&self) -> bool {
3522 match &self.client_state {
3523 ProjectClientState::Local { is_shared, .. } => *is_shared,
3524 ProjectClientState::Remote { .. } => false,
3525 }
3526 }
3527
3528 fn create_local_worktree(
3529 &mut self,
3530 abs_path: impl AsRef<Path>,
3531 visible: bool,
3532 cx: &mut ModelContext<Self>,
3533 ) -> Task<Result<ModelHandle<Worktree>>> {
3534 let fs = self.fs.clone();
3535 let client = self.client.clone();
3536 let next_entry_id = self.next_entry_id.clone();
3537 let path: Arc<Path> = abs_path.as_ref().into();
3538 let task = self
3539 .loading_local_worktrees
3540 .entry(path.clone())
3541 .or_insert_with(|| {
3542 cx.spawn(|project, mut cx| {
3543 async move {
3544 let worktree = Worktree::local(
3545 client.clone(),
3546 path.clone(),
3547 visible,
3548 fs,
3549 next_entry_id,
3550 &mut cx,
3551 )
3552 .await;
3553 project.update(&mut cx, |project, _| {
3554 project.loading_local_worktrees.remove(&path);
3555 });
3556 let worktree = worktree?;
3557
3558 let remote_project_id = project.update(&mut cx, |project, cx| {
3559 project.add_worktree(&worktree, cx);
3560 project.remote_id()
3561 });
3562
3563 if let Some(project_id) = remote_project_id {
3564 // Because sharing is async, we may have *unshared* the project by the time it completes,
3565 // in which case we need to register the worktree instead.
3566 loop {
3567 if project.read_with(&cx, |project, _| project.is_shared()) {
3568 if worktree
3569 .update(&mut cx, |worktree, cx| {
3570 worktree.as_local_mut().unwrap().share(project_id, cx)
3571 })
3572 .await
3573 .is_ok()
3574 {
3575 break;
3576 }
3577 } else {
3578 worktree
3579 .update(&mut cx, |worktree, cx| {
3580 worktree
3581 .as_local_mut()
3582 .unwrap()
3583 .register(project_id, cx)
3584 })
3585 .await?;
3586 break;
3587 }
3588 }
3589 }
3590
3591 Ok(worktree)
3592 }
3593 .map_err(|err| Arc::new(err))
3594 })
3595 .shared()
3596 })
3597 .clone();
3598 cx.foreground().spawn(async move {
3599 match task.await {
3600 Ok(worktree) => Ok(worktree),
3601 Err(err) => Err(anyhow!("{}", err)),
3602 }
3603 })
3604 }
3605
3606 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3607 self.worktrees.retain(|worktree| {
3608 if let Some(worktree) = worktree.upgrade(cx) {
3609 let id = worktree.read(cx).id();
3610 if id == id_to_remove {
3611 cx.emit(Event::WorktreeRemoved(id));
3612 false
3613 } else {
3614 true
3615 }
3616 } else {
3617 false
3618 }
3619 });
3620 cx.notify();
3621 }
3622
3623 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3624 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3625 if worktree.read(cx).is_local() {
3626 cx.subscribe(&worktree, |this, worktree, _, cx| {
3627 this.update_local_worktree_buffers(worktree, cx);
3628 })
3629 .detach();
3630 }
3631
3632 let push_strong_handle = {
3633 let worktree = worktree.read(cx);
3634 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3635 };
3636 if push_strong_handle {
3637 self.worktrees
3638 .push(WorktreeHandle::Strong(worktree.clone()));
3639 } else {
3640 cx.observe_release(&worktree, |this, _, cx| {
3641 this.worktrees
3642 .retain(|worktree| worktree.upgrade(cx).is_some());
3643 cx.notify();
3644 })
3645 .detach();
3646 self.worktrees
3647 .push(WorktreeHandle::Weak(worktree.downgrade()));
3648 }
3649 cx.emit(Event::WorktreeAdded);
3650 cx.notify();
3651 }
3652
3653 fn update_local_worktree_buffers(
3654 &mut self,
3655 worktree_handle: ModelHandle<Worktree>,
3656 cx: &mut ModelContext<Self>,
3657 ) {
3658 let snapshot = worktree_handle.read(cx).snapshot();
3659 let mut buffers_to_delete = Vec::new();
3660 let mut renamed_buffers = Vec::new();
3661 for (buffer_id, buffer) in &self.opened_buffers {
3662 if let Some(buffer) = buffer.upgrade(cx) {
3663 buffer.update(cx, |buffer, cx| {
3664 if let Some(old_file) = File::from_dyn(buffer.file()) {
3665 if old_file.worktree != worktree_handle {
3666 return;
3667 }
3668
3669 let new_file = if let Some(entry) = old_file
3670 .entry_id
3671 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3672 {
3673 File {
3674 is_local: true,
3675 entry_id: Some(entry.id),
3676 mtime: entry.mtime,
3677 path: entry.path.clone(),
3678 worktree: worktree_handle.clone(),
3679 }
3680 } else if let Some(entry) =
3681 snapshot.entry_for_path(old_file.path().as_ref())
3682 {
3683 File {
3684 is_local: true,
3685 entry_id: Some(entry.id),
3686 mtime: entry.mtime,
3687 path: entry.path.clone(),
3688 worktree: worktree_handle.clone(),
3689 }
3690 } else {
3691 File {
3692 is_local: true,
3693 entry_id: None,
3694 path: old_file.path().clone(),
3695 mtime: old_file.mtime(),
3696 worktree: worktree_handle.clone(),
3697 }
3698 };
3699
3700 let old_path = old_file.abs_path(cx);
3701 if new_file.abs_path(cx) != old_path {
3702 renamed_buffers.push((cx.handle(), old_path));
3703 }
3704
3705 if let Some(project_id) = self.remote_id() {
3706 self.client
3707 .send(proto::UpdateBufferFile {
3708 project_id,
3709 buffer_id: *buffer_id as u64,
3710 file: Some(new_file.to_proto()),
3711 })
3712 .log_err();
3713 }
3714 buffer.file_updated(Box::new(new_file), cx).detach();
3715 }
3716 });
3717 } else {
3718 buffers_to_delete.push(*buffer_id);
3719 }
3720 }
3721
3722 for buffer_id in buffers_to_delete {
3723 self.opened_buffers.remove(&buffer_id);
3724 }
3725
3726 for (buffer, old_path) in renamed_buffers {
3727 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3728 self.assign_language_to_buffer(&buffer, cx);
3729 self.register_buffer_with_language_server(&buffer, cx);
3730 }
3731 }
3732
3733 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3734 let new_active_entry = entry.and_then(|project_path| {
3735 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3736 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3737 Some(entry.id)
3738 });
3739 if new_active_entry != self.active_entry {
3740 self.active_entry = new_active_entry;
3741 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3742 }
3743 }
3744
3745 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3746 self.language_server_statuses
3747 .values()
3748 .any(|status| status.pending_diagnostic_updates > 0)
3749 }
3750
3751 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3752 let mut summary = DiagnosticSummary::default();
3753 for (_, path_summary) in self.diagnostic_summaries(cx) {
3754 summary.error_count += path_summary.error_count;
3755 summary.warning_count += path_summary.warning_count;
3756 }
3757 summary
3758 }
3759
3760 pub fn diagnostic_summaries<'a>(
3761 &'a self,
3762 cx: &'a AppContext,
3763 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3764 self.worktrees(cx).flat_map(move |worktree| {
3765 let worktree = worktree.read(cx);
3766 let worktree_id = worktree.id();
3767 worktree
3768 .diagnostic_summaries()
3769 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3770 })
3771 }
3772
3773 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3774 if self
3775 .language_server_statuses
3776 .values()
3777 .map(|status| status.pending_diagnostic_updates)
3778 .sum::<isize>()
3779 == 1
3780 {
3781 cx.emit(Event::DiskBasedDiagnosticsStarted);
3782 }
3783 }
3784
3785 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3786 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3787 if self
3788 .language_server_statuses
3789 .values()
3790 .map(|status| status.pending_diagnostic_updates)
3791 .sum::<isize>()
3792 == 0
3793 {
3794 cx.emit(Event::DiskBasedDiagnosticsFinished);
3795 }
3796 }
3797
3798 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3799 self.active_entry
3800 }
3801
3802 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3803 self.worktree_for_id(path.worktree_id, cx)?
3804 .read(cx)
3805 .entry_for_path(&path.path)
3806 .map(|entry| entry.id)
3807 }
3808
3809 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3810 let worktree = self.worktree_for_entry(entry_id, cx)?;
3811 let worktree = worktree.read(cx);
3812 let worktree_id = worktree.id();
3813 let path = worktree.entry_for_id(entry_id)?.path.clone();
3814 Some(ProjectPath { worktree_id, path })
3815 }
3816
3817 // RPC message handlers
3818
3819 async fn handle_request_join_project(
3820 this: ModelHandle<Self>,
3821 message: TypedEnvelope<proto::RequestJoinProject>,
3822 _: Arc<Client>,
3823 mut cx: AsyncAppContext,
3824 ) -> Result<()> {
3825 let user_id = message.payload.requester_id;
3826 if this.read_with(&cx, |project, _| {
3827 project.collaborators.values().any(|c| c.user.id == user_id)
3828 }) {
3829 this.update(&mut cx, |this, cx| {
3830 this.respond_to_join_request(user_id, true, cx)
3831 });
3832 } else {
3833 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3834 let user = user_store
3835 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3836 .await?;
3837 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3838 }
3839 Ok(())
3840 }
3841
3842 async fn handle_unregister_project(
3843 this: ModelHandle<Self>,
3844 _: TypedEnvelope<proto::UnregisterProject>,
3845 _: Arc<Client>,
3846 mut cx: AsyncAppContext,
3847 ) -> Result<()> {
3848 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3849 Ok(())
3850 }
3851
3852 async fn handle_project_unshared(
3853 this: ModelHandle<Self>,
3854 _: TypedEnvelope<proto::ProjectUnshared>,
3855 _: Arc<Client>,
3856 mut cx: AsyncAppContext,
3857 ) -> Result<()> {
3858 this.update(&mut cx, |this, cx| this.unshared(cx));
3859 Ok(())
3860 }
3861
3862 async fn handle_add_collaborator(
3863 this: ModelHandle<Self>,
3864 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3865 _: Arc<Client>,
3866 mut cx: AsyncAppContext,
3867 ) -> Result<()> {
3868 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3869 let collaborator = envelope
3870 .payload
3871 .collaborator
3872 .take()
3873 .ok_or_else(|| anyhow!("empty collaborator"))?;
3874
3875 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3876 this.update(&mut cx, |this, cx| {
3877 this.collaborators
3878 .insert(collaborator.peer_id, collaborator);
3879 cx.notify();
3880 });
3881
3882 Ok(())
3883 }
3884
3885 async fn handle_remove_collaborator(
3886 this: ModelHandle<Self>,
3887 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3888 _: Arc<Client>,
3889 mut cx: AsyncAppContext,
3890 ) -> Result<()> {
3891 this.update(&mut cx, |this, cx| {
3892 let peer_id = PeerId(envelope.payload.peer_id);
3893 let replica_id = this
3894 .collaborators
3895 .remove(&peer_id)
3896 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3897 .replica_id;
3898 for (_, buffer) in &this.opened_buffers {
3899 if let Some(buffer) = buffer.upgrade(cx) {
3900 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3901 }
3902 }
3903
3904 cx.emit(Event::CollaboratorLeft(peer_id));
3905 cx.notify();
3906 Ok(())
3907 })
3908 }
3909
3910 async fn handle_join_project_request_cancelled(
3911 this: ModelHandle<Self>,
3912 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3913 _: Arc<Client>,
3914 mut cx: AsyncAppContext,
3915 ) -> Result<()> {
3916 let user = this
3917 .update(&mut cx, |this, cx| {
3918 this.user_store.update(cx, |user_store, cx| {
3919 user_store.fetch_user(envelope.payload.requester_id, cx)
3920 })
3921 })
3922 .await?;
3923
3924 this.update(&mut cx, |_, cx| {
3925 cx.emit(Event::ContactCancelledJoinRequest(user));
3926 });
3927
3928 Ok(())
3929 }
3930
3931 async fn handle_register_worktree(
3932 this: ModelHandle<Self>,
3933 envelope: TypedEnvelope<proto::RegisterWorktree>,
3934 client: Arc<Client>,
3935 mut cx: AsyncAppContext,
3936 ) -> Result<()> {
3937 this.update(&mut cx, |this, cx| {
3938 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3939 let replica_id = this.replica_id();
3940 let worktree = proto::Worktree {
3941 id: envelope.payload.worktree_id,
3942 root_name: envelope.payload.root_name,
3943 entries: Default::default(),
3944 diagnostic_summaries: Default::default(),
3945 visible: envelope.payload.visible,
3946 scan_id: 0,
3947 };
3948 let (worktree, load_task) =
3949 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3950 this.add_worktree(&worktree, cx);
3951 load_task.detach();
3952 Ok(())
3953 })
3954 }
3955
3956 async fn handle_unregister_worktree(
3957 this: ModelHandle<Self>,
3958 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3959 _: Arc<Client>,
3960 mut cx: AsyncAppContext,
3961 ) -> Result<()> {
3962 this.update(&mut cx, |this, cx| {
3963 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3964 this.remove_worktree(worktree_id, cx);
3965 Ok(())
3966 })
3967 }
3968
3969 async fn handle_update_worktree(
3970 this: ModelHandle<Self>,
3971 envelope: TypedEnvelope<proto::UpdateWorktree>,
3972 _: Arc<Client>,
3973 mut cx: AsyncAppContext,
3974 ) -> Result<()> {
3975 this.update(&mut cx, |this, cx| {
3976 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3977 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3978 worktree.update(cx, |worktree, _| {
3979 let worktree = worktree.as_remote_mut().unwrap();
3980 worktree.update_from_remote(envelope)
3981 })?;
3982 }
3983 Ok(())
3984 })
3985 }
3986
3987 async fn handle_create_project_entry(
3988 this: ModelHandle<Self>,
3989 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3990 _: Arc<Client>,
3991 mut cx: AsyncAppContext,
3992 ) -> Result<proto::ProjectEntryResponse> {
3993 let worktree = this.update(&mut cx, |this, cx| {
3994 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3995 this.worktree_for_id(worktree_id, cx)
3996 .ok_or_else(|| anyhow!("worktree not found"))
3997 })?;
3998 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3999 let entry = worktree
4000 .update(&mut cx, |worktree, cx| {
4001 let worktree = worktree.as_local_mut().unwrap();
4002 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4003 worktree.create_entry(path, envelope.payload.is_directory, cx)
4004 })
4005 .await?;
4006 Ok(proto::ProjectEntryResponse {
4007 entry: Some((&entry).into()),
4008 worktree_scan_id: worktree_scan_id as u64,
4009 })
4010 }
4011
4012 async fn handle_rename_project_entry(
4013 this: ModelHandle<Self>,
4014 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4015 _: Arc<Client>,
4016 mut cx: AsyncAppContext,
4017 ) -> Result<proto::ProjectEntryResponse> {
4018 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4019 let worktree = this.read_with(&cx, |this, cx| {
4020 this.worktree_for_entry(entry_id, cx)
4021 .ok_or_else(|| anyhow!("worktree not found"))
4022 })?;
4023 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4024 let entry = worktree
4025 .update(&mut cx, |worktree, cx| {
4026 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4027 worktree
4028 .as_local_mut()
4029 .unwrap()
4030 .rename_entry(entry_id, new_path, cx)
4031 .ok_or_else(|| anyhow!("invalid entry"))
4032 })?
4033 .await?;
4034 Ok(proto::ProjectEntryResponse {
4035 entry: Some((&entry).into()),
4036 worktree_scan_id: worktree_scan_id as u64,
4037 })
4038 }
4039
4040 async fn handle_delete_project_entry(
4041 this: ModelHandle<Self>,
4042 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4043 _: Arc<Client>,
4044 mut cx: AsyncAppContext,
4045 ) -> Result<proto::ProjectEntryResponse> {
4046 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4047 let worktree = this.read_with(&cx, |this, cx| {
4048 this.worktree_for_entry(entry_id, cx)
4049 .ok_or_else(|| anyhow!("worktree not found"))
4050 })?;
4051 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4052 worktree
4053 .update(&mut cx, |worktree, cx| {
4054 worktree
4055 .as_local_mut()
4056 .unwrap()
4057 .delete_entry(entry_id, cx)
4058 .ok_or_else(|| anyhow!("invalid entry"))
4059 })?
4060 .await?;
4061 Ok(proto::ProjectEntryResponse {
4062 entry: None,
4063 worktree_scan_id: worktree_scan_id as u64,
4064 })
4065 }
4066
4067 async fn handle_update_diagnostic_summary(
4068 this: ModelHandle<Self>,
4069 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4070 _: Arc<Client>,
4071 mut cx: AsyncAppContext,
4072 ) -> Result<()> {
4073 this.update(&mut cx, |this, cx| {
4074 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4075 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4076 if let Some(summary) = envelope.payload.summary {
4077 let project_path = ProjectPath {
4078 worktree_id,
4079 path: Path::new(&summary.path).into(),
4080 };
4081 worktree.update(cx, |worktree, _| {
4082 worktree
4083 .as_remote_mut()
4084 .unwrap()
4085 .update_diagnostic_summary(project_path.path.clone(), &summary);
4086 });
4087 cx.emit(Event::DiagnosticsUpdated(project_path));
4088 }
4089 }
4090 Ok(())
4091 })
4092 }
4093
4094 async fn handle_start_language_server(
4095 this: ModelHandle<Self>,
4096 envelope: TypedEnvelope<proto::StartLanguageServer>,
4097 _: Arc<Client>,
4098 mut cx: AsyncAppContext,
4099 ) -> Result<()> {
4100 let server = envelope
4101 .payload
4102 .server
4103 .ok_or_else(|| anyhow!("invalid server"))?;
4104 this.update(&mut cx, |this, cx| {
4105 this.language_server_statuses.insert(
4106 server.id as usize,
4107 LanguageServerStatus {
4108 name: server.name,
4109 pending_work: Default::default(),
4110 pending_diagnostic_updates: 0,
4111 },
4112 );
4113 cx.notify();
4114 });
4115 Ok(())
4116 }
4117
4118 async fn handle_update_language_server(
4119 this: ModelHandle<Self>,
4120 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4121 _: Arc<Client>,
4122 mut cx: AsyncAppContext,
4123 ) -> Result<()> {
4124 let language_server_id = envelope.payload.language_server_id as usize;
4125 match envelope
4126 .payload
4127 .variant
4128 .ok_or_else(|| anyhow!("invalid variant"))?
4129 {
4130 proto::update_language_server::Variant::WorkStart(payload) => {
4131 this.update(&mut cx, |this, cx| {
4132 this.on_lsp_work_start(language_server_id, payload.token, cx);
4133 })
4134 }
4135 proto::update_language_server::Variant::WorkProgress(payload) => {
4136 this.update(&mut cx, |this, cx| {
4137 this.on_lsp_work_progress(
4138 language_server_id,
4139 payload.token,
4140 LanguageServerProgress {
4141 message: payload.message,
4142 percentage: payload.percentage.map(|p| p as usize),
4143 last_update_at: Instant::now(),
4144 },
4145 cx,
4146 );
4147 })
4148 }
4149 proto::update_language_server::Variant::WorkEnd(payload) => {
4150 this.update(&mut cx, |this, cx| {
4151 this.on_lsp_work_end(language_server_id, payload.token, cx);
4152 })
4153 }
4154 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4155 this.update(&mut cx, |this, cx| {
4156 this.disk_based_diagnostics_started(cx);
4157 })
4158 }
4159 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4160 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4161 }
4162 }
4163
4164 Ok(())
4165 }
4166
4167 async fn handle_update_buffer(
4168 this: ModelHandle<Self>,
4169 envelope: TypedEnvelope<proto::UpdateBuffer>,
4170 _: Arc<Client>,
4171 mut cx: AsyncAppContext,
4172 ) -> Result<()> {
4173 this.update(&mut cx, |this, cx| {
4174 let payload = envelope.payload.clone();
4175 let buffer_id = payload.buffer_id;
4176 let ops = payload
4177 .operations
4178 .into_iter()
4179 .map(|op| language::proto::deserialize_operation(op))
4180 .collect::<Result<Vec<_>, _>>()?;
4181 let is_remote = this.is_remote();
4182 match this.opened_buffers.entry(buffer_id) {
4183 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4184 OpenBuffer::Strong(buffer) => {
4185 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4186 }
4187 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4188 OpenBuffer::Weak(_) => {}
4189 },
4190 hash_map::Entry::Vacant(e) => {
4191 assert!(
4192 is_remote,
4193 "received buffer update from {:?}",
4194 envelope.original_sender_id
4195 );
4196 e.insert(OpenBuffer::Loading(ops));
4197 }
4198 }
4199 Ok(())
4200 })
4201 }
4202
4203 async fn handle_update_buffer_file(
4204 this: ModelHandle<Self>,
4205 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4206 _: Arc<Client>,
4207 mut cx: AsyncAppContext,
4208 ) -> Result<()> {
4209 this.update(&mut cx, |this, cx| {
4210 let payload = envelope.payload.clone();
4211 let buffer_id = payload.buffer_id;
4212 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4213 let worktree = this
4214 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4215 .ok_or_else(|| anyhow!("no such worktree"))?;
4216 let file = File::from_proto(file, worktree.clone(), cx)?;
4217 let buffer = this
4218 .opened_buffers
4219 .get_mut(&buffer_id)
4220 .and_then(|b| b.upgrade(cx))
4221 .ok_or_else(|| anyhow!("no such buffer"))?;
4222 buffer.update(cx, |buffer, cx| {
4223 buffer.file_updated(Box::new(file), cx).detach();
4224 });
4225 Ok(())
4226 })
4227 }
4228
4229 async fn handle_save_buffer(
4230 this: ModelHandle<Self>,
4231 envelope: TypedEnvelope<proto::SaveBuffer>,
4232 _: Arc<Client>,
4233 mut cx: AsyncAppContext,
4234 ) -> Result<proto::BufferSaved> {
4235 let buffer_id = envelope.payload.buffer_id;
4236 let requested_version = deserialize_version(envelope.payload.version);
4237
4238 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4239 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4240 let buffer = this
4241 .opened_buffers
4242 .get(&buffer_id)
4243 .and_then(|buffer| buffer.upgrade(cx))
4244 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4245 Ok::<_, anyhow::Error>((project_id, buffer))
4246 })?;
4247 buffer
4248 .update(&mut cx, |buffer, _| {
4249 buffer.wait_for_version(requested_version)
4250 })
4251 .await;
4252
4253 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4254 Ok(proto::BufferSaved {
4255 project_id,
4256 buffer_id,
4257 version: serialize_version(&saved_version),
4258 mtime: Some(mtime.into()),
4259 })
4260 }
4261
4262 async fn handle_reload_buffers(
4263 this: ModelHandle<Self>,
4264 envelope: TypedEnvelope<proto::ReloadBuffers>,
4265 _: Arc<Client>,
4266 mut cx: AsyncAppContext,
4267 ) -> Result<proto::ReloadBuffersResponse> {
4268 let sender_id = envelope.original_sender_id()?;
4269 let reload = this.update(&mut cx, |this, cx| {
4270 let mut buffers = HashSet::default();
4271 for buffer_id in &envelope.payload.buffer_ids {
4272 buffers.insert(
4273 this.opened_buffers
4274 .get(buffer_id)
4275 .and_then(|buffer| buffer.upgrade(cx))
4276 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4277 );
4278 }
4279 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4280 })?;
4281
4282 let project_transaction = reload.await?;
4283 let project_transaction = this.update(&mut cx, |this, cx| {
4284 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4285 });
4286 Ok(proto::ReloadBuffersResponse {
4287 transaction: Some(project_transaction),
4288 })
4289 }
4290
4291 async fn handle_format_buffers(
4292 this: ModelHandle<Self>,
4293 envelope: TypedEnvelope<proto::FormatBuffers>,
4294 _: Arc<Client>,
4295 mut cx: AsyncAppContext,
4296 ) -> Result<proto::FormatBuffersResponse> {
4297 let sender_id = envelope.original_sender_id()?;
4298 let format = this.update(&mut cx, |this, cx| {
4299 let mut buffers = HashSet::default();
4300 for buffer_id in &envelope.payload.buffer_ids {
4301 buffers.insert(
4302 this.opened_buffers
4303 .get(buffer_id)
4304 .and_then(|buffer| buffer.upgrade(cx))
4305 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4306 );
4307 }
4308 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4309 })?;
4310
4311 let project_transaction = format.await?;
4312 let project_transaction = this.update(&mut cx, |this, cx| {
4313 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4314 });
4315 Ok(proto::FormatBuffersResponse {
4316 transaction: Some(project_transaction),
4317 })
4318 }
4319
4320 async fn handle_get_completions(
4321 this: ModelHandle<Self>,
4322 envelope: TypedEnvelope<proto::GetCompletions>,
4323 _: Arc<Client>,
4324 mut cx: AsyncAppContext,
4325 ) -> Result<proto::GetCompletionsResponse> {
4326 let position = envelope
4327 .payload
4328 .position
4329 .and_then(language::proto::deserialize_anchor)
4330 .ok_or_else(|| anyhow!("invalid position"))?;
4331 let version = deserialize_version(envelope.payload.version);
4332 let buffer = this.read_with(&cx, |this, cx| {
4333 this.opened_buffers
4334 .get(&envelope.payload.buffer_id)
4335 .and_then(|buffer| buffer.upgrade(cx))
4336 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4337 })?;
4338 buffer
4339 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4340 .await;
4341 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4342 let completions = this
4343 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4344 .await?;
4345
4346 Ok(proto::GetCompletionsResponse {
4347 completions: completions
4348 .iter()
4349 .map(language::proto::serialize_completion)
4350 .collect(),
4351 version: serialize_version(&version),
4352 })
4353 }
4354
4355 async fn handle_apply_additional_edits_for_completion(
4356 this: ModelHandle<Self>,
4357 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4358 _: Arc<Client>,
4359 mut cx: AsyncAppContext,
4360 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4361 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4362 let buffer = this
4363 .opened_buffers
4364 .get(&envelope.payload.buffer_id)
4365 .and_then(|buffer| buffer.upgrade(cx))
4366 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4367 let language = buffer.read(cx).language();
4368 let completion = language::proto::deserialize_completion(
4369 envelope
4370 .payload
4371 .completion
4372 .ok_or_else(|| anyhow!("invalid completion"))?,
4373 language,
4374 )?;
4375 Ok::<_, anyhow::Error>(
4376 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4377 )
4378 })?;
4379
4380 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4381 transaction: apply_additional_edits
4382 .await?
4383 .as_ref()
4384 .map(language::proto::serialize_transaction),
4385 })
4386 }
4387
4388 async fn handle_get_code_actions(
4389 this: ModelHandle<Self>,
4390 envelope: TypedEnvelope<proto::GetCodeActions>,
4391 _: Arc<Client>,
4392 mut cx: AsyncAppContext,
4393 ) -> Result<proto::GetCodeActionsResponse> {
4394 let start = envelope
4395 .payload
4396 .start
4397 .and_then(language::proto::deserialize_anchor)
4398 .ok_or_else(|| anyhow!("invalid start"))?;
4399 let end = envelope
4400 .payload
4401 .end
4402 .and_then(language::proto::deserialize_anchor)
4403 .ok_or_else(|| anyhow!("invalid end"))?;
4404 let buffer = this.update(&mut cx, |this, cx| {
4405 this.opened_buffers
4406 .get(&envelope.payload.buffer_id)
4407 .and_then(|buffer| buffer.upgrade(cx))
4408 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4409 })?;
4410 buffer
4411 .update(&mut cx, |buffer, _| {
4412 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4413 })
4414 .await;
4415
4416 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4417 let code_actions = this.update(&mut cx, |this, cx| {
4418 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4419 })?;
4420
4421 Ok(proto::GetCodeActionsResponse {
4422 actions: code_actions
4423 .await?
4424 .iter()
4425 .map(language::proto::serialize_code_action)
4426 .collect(),
4427 version: serialize_version(&version),
4428 })
4429 }
4430
4431 async fn handle_apply_code_action(
4432 this: ModelHandle<Self>,
4433 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4434 _: Arc<Client>,
4435 mut cx: AsyncAppContext,
4436 ) -> Result<proto::ApplyCodeActionResponse> {
4437 let sender_id = envelope.original_sender_id()?;
4438 let action = language::proto::deserialize_code_action(
4439 envelope
4440 .payload
4441 .action
4442 .ok_or_else(|| anyhow!("invalid action"))?,
4443 )?;
4444 let apply_code_action = this.update(&mut cx, |this, cx| {
4445 let buffer = this
4446 .opened_buffers
4447 .get(&envelope.payload.buffer_id)
4448 .and_then(|buffer| buffer.upgrade(cx))
4449 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4450 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4451 })?;
4452
4453 let project_transaction = apply_code_action.await?;
4454 let project_transaction = this.update(&mut cx, |this, cx| {
4455 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4456 });
4457 Ok(proto::ApplyCodeActionResponse {
4458 transaction: Some(project_transaction),
4459 })
4460 }
4461
4462 async fn handle_lsp_command<T: LspCommand>(
4463 this: ModelHandle<Self>,
4464 envelope: TypedEnvelope<T::ProtoRequest>,
4465 _: Arc<Client>,
4466 mut cx: AsyncAppContext,
4467 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4468 where
4469 <T::LspRequest as lsp::request::Request>::Result: Send,
4470 {
4471 let sender_id = envelope.original_sender_id()?;
4472 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4473 let buffer_handle = this.read_with(&cx, |this, _| {
4474 this.opened_buffers
4475 .get(&buffer_id)
4476 .and_then(|buffer| buffer.upgrade(&cx))
4477 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4478 })?;
4479 let request = T::from_proto(
4480 envelope.payload,
4481 this.clone(),
4482 buffer_handle.clone(),
4483 cx.clone(),
4484 )
4485 .await?;
4486 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4487 let response = this
4488 .update(&mut cx, |this, cx| {
4489 this.request_lsp(buffer_handle, request, cx)
4490 })
4491 .await?;
4492 this.update(&mut cx, |this, cx| {
4493 Ok(T::response_to_proto(
4494 response,
4495 this,
4496 sender_id,
4497 &buffer_version,
4498 cx,
4499 ))
4500 })
4501 }
4502
4503 async fn handle_get_project_symbols(
4504 this: ModelHandle<Self>,
4505 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4506 _: Arc<Client>,
4507 mut cx: AsyncAppContext,
4508 ) -> Result<proto::GetProjectSymbolsResponse> {
4509 let symbols = this
4510 .update(&mut cx, |this, cx| {
4511 this.symbols(&envelope.payload.query, cx)
4512 })
4513 .await?;
4514
4515 Ok(proto::GetProjectSymbolsResponse {
4516 symbols: symbols.iter().map(serialize_symbol).collect(),
4517 })
4518 }
4519
4520 async fn handle_search_project(
4521 this: ModelHandle<Self>,
4522 envelope: TypedEnvelope<proto::SearchProject>,
4523 _: Arc<Client>,
4524 mut cx: AsyncAppContext,
4525 ) -> Result<proto::SearchProjectResponse> {
4526 let peer_id = envelope.original_sender_id()?;
4527 let query = SearchQuery::from_proto(envelope.payload)?;
4528 let result = this
4529 .update(&mut cx, |this, cx| this.search(query, cx))
4530 .await?;
4531
4532 this.update(&mut cx, |this, cx| {
4533 let mut locations = Vec::new();
4534 for (buffer, ranges) in result {
4535 for range in ranges {
4536 let start = serialize_anchor(&range.start);
4537 let end = serialize_anchor(&range.end);
4538 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4539 locations.push(proto::Location {
4540 buffer: Some(buffer),
4541 start: Some(start),
4542 end: Some(end),
4543 });
4544 }
4545 }
4546 Ok(proto::SearchProjectResponse { locations })
4547 })
4548 }
4549
4550 async fn handle_open_buffer_for_symbol(
4551 this: ModelHandle<Self>,
4552 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4553 _: Arc<Client>,
4554 mut cx: AsyncAppContext,
4555 ) -> Result<proto::OpenBufferForSymbolResponse> {
4556 let peer_id = envelope.original_sender_id()?;
4557 let symbol = envelope
4558 .payload
4559 .symbol
4560 .ok_or_else(|| anyhow!("invalid symbol"))?;
4561 let symbol = this.read_with(&cx, |this, _| {
4562 let symbol = this.deserialize_symbol(symbol)?;
4563 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4564 if signature == symbol.signature {
4565 Ok(symbol)
4566 } else {
4567 Err(anyhow!("invalid symbol signature"))
4568 }
4569 })?;
4570 let buffer = this
4571 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4572 .await?;
4573
4574 Ok(proto::OpenBufferForSymbolResponse {
4575 buffer: Some(this.update(&mut cx, |this, cx| {
4576 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4577 })),
4578 })
4579 }
4580
4581 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4582 let mut hasher = Sha256::new();
4583 hasher.update(worktree_id.to_proto().to_be_bytes());
4584 hasher.update(path.to_string_lossy().as_bytes());
4585 hasher.update(self.nonce.to_be_bytes());
4586 hasher.finalize().as_slice().try_into().unwrap()
4587 }
4588
4589 async fn handle_open_buffer_by_id(
4590 this: ModelHandle<Self>,
4591 envelope: TypedEnvelope<proto::OpenBufferById>,
4592 _: Arc<Client>,
4593 mut cx: AsyncAppContext,
4594 ) -> Result<proto::OpenBufferResponse> {
4595 let peer_id = envelope.original_sender_id()?;
4596 let buffer = this
4597 .update(&mut cx, |this, cx| {
4598 this.open_buffer_by_id(envelope.payload.id, cx)
4599 })
4600 .await?;
4601 this.update(&mut cx, |this, cx| {
4602 Ok(proto::OpenBufferResponse {
4603 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4604 })
4605 })
4606 }
4607
4608 async fn handle_open_buffer_by_path(
4609 this: ModelHandle<Self>,
4610 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4611 _: Arc<Client>,
4612 mut cx: AsyncAppContext,
4613 ) -> Result<proto::OpenBufferResponse> {
4614 let peer_id = envelope.original_sender_id()?;
4615 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4616 let open_buffer = this.update(&mut cx, |this, cx| {
4617 this.open_buffer(
4618 ProjectPath {
4619 worktree_id,
4620 path: PathBuf::from(envelope.payload.path).into(),
4621 },
4622 cx,
4623 )
4624 });
4625
4626 let buffer = open_buffer.await?;
4627 this.update(&mut cx, |this, cx| {
4628 Ok(proto::OpenBufferResponse {
4629 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4630 })
4631 })
4632 }
4633
4634 fn serialize_project_transaction_for_peer(
4635 &mut self,
4636 project_transaction: ProjectTransaction,
4637 peer_id: PeerId,
4638 cx: &AppContext,
4639 ) -> proto::ProjectTransaction {
4640 let mut serialized_transaction = proto::ProjectTransaction {
4641 buffers: Default::default(),
4642 transactions: Default::default(),
4643 };
4644 for (buffer, transaction) in project_transaction.0 {
4645 serialized_transaction
4646 .buffers
4647 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4648 serialized_transaction
4649 .transactions
4650 .push(language::proto::serialize_transaction(&transaction));
4651 }
4652 serialized_transaction
4653 }
4654
4655 fn deserialize_project_transaction(
4656 &mut self,
4657 message: proto::ProjectTransaction,
4658 push_to_history: bool,
4659 cx: &mut ModelContext<Self>,
4660 ) -> Task<Result<ProjectTransaction>> {
4661 cx.spawn(|this, mut cx| async move {
4662 let mut project_transaction = ProjectTransaction::default();
4663 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4664 let buffer = this
4665 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4666 .await?;
4667 let transaction = language::proto::deserialize_transaction(transaction)?;
4668 project_transaction.0.insert(buffer, transaction);
4669 }
4670
4671 for (buffer, transaction) in &project_transaction.0 {
4672 buffer
4673 .update(&mut cx, |buffer, _| {
4674 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4675 })
4676 .await;
4677
4678 if push_to_history {
4679 buffer.update(&mut cx, |buffer, _| {
4680 buffer.push_transaction(transaction.clone(), Instant::now());
4681 });
4682 }
4683 }
4684
4685 Ok(project_transaction)
4686 })
4687 }
4688
4689 fn serialize_buffer_for_peer(
4690 &mut self,
4691 buffer: &ModelHandle<Buffer>,
4692 peer_id: PeerId,
4693 cx: &AppContext,
4694 ) -> proto::Buffer {
4695 let buffer_id = buffer.read(cx).remote_id();
4696 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4697 if shared_buffers.insert(buffer_id) {
4698 proto::Buffer {
4699 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4700 }
4701 } else {
4702 proto::Buffer {
4703 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4704 }
4705 }
4706 }
4707
4708 fn deserialize_buffer(
4709 &mut self,
4710 buffer: proto::Buffer,
4711 cx: &mut ModelContext<Self>,
4712 ) -> Task<Result<ModelHandle<Buffer>>> {
4713 let replica_id = self.replica_id();
4714
4715 let opened_buffer_tx = self.opened_buffer.0.clone();
4716 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4717 cx.spawn(|this, mut cx| async move {
4718 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4719 proto::buffer::Variant::Id(id) => {
4720 let buffer = loop {
4721 let buffer = this.read_with(&cx, |this, cx| {
4722 this.opened_buffers
4723 .get(&id)
4724 .and_then(|buffer| buffer.upgrade(cx))
4725 });
4726 if let Some(buffer) = buffer {
4727 break buffer;
4728 }
4729 opened_buffer_rx
4730 .next()
4731 .await
4732 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4733 };
4734 Ok(buffer)
4735 }
4736 proto::buffer::Variant::State(mut buffer) => {
4737 let mut buffer_worktree = None;
4738 let mut buffer_file = None;
4739 if let Some(file) = buffer.file.take() {
4740 this.read_with(&cx, |this, cx| {
4741 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4742 let worktree =
4743 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4744 anyhow!("no worktree found for id {}", file.worktree_id)
4745 })?;
4746 buffer_file =
4747 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4748 as Box<dyn language::File>);
4749 buffer_worktree = Some(worktree);
4750 Ok::<_, anyhow::Error>(())
4751 })?;
4752 }
4753
4754 let buffer = cx.add_model(|cx| {
4755 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4756 });
4757
4758 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4759
4760 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4761 Ok(buffer)
4762 }
4763 }
4764 })
4765 }
4766
4767 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4768 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4769 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4770 let start = serialized_symbol
4771 .start
4772 .ok_or_else(|| anyhow!("invalid start"))?;
4773 let end = serialized_symbol
4774 .end
4775 .ok_or_else(|| anyhow!("invalid end"))?;
4776 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4777 let path = PathBuf::from(serialized_symbol.path);
4778 let language = self.languages.select_language(&path);
4779 Ok(Symbol {
4780 source_worktree_id,
4781 worktree_id,
4782 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4783 label: language
4784 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4785 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4786 name: serialized_symbol.name,
4787 path,
4788 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4789 kind,
4790 signature: serialized_symbol
4791 .signature
4792 .try_into()
4793 .map_err(|_| anyhow!("invalid signature"))?,
4794 })
4795 }
4796
4797 async fn handle_buffer_saved(
4798 this: ModelHandle<Self>,
4799 envelope: TypedEnvelope<proto::BufferSaved>,
4800 _: Arc<Client>,
4801 mut cx: AsyncAppContext,
4802 ) -> Result<()> {
4803 let version = deserialize_version(envelope.payload.version);
4804 let mtime = envelope
4805 .payload
4806 .mtime
4807 .ok_or_else(|| anyhow!("missing mtime"))?
4808 .into();
4809
4810 this.update(&mut cx, |this, cx| {
4811 let buffer = this
4812 .opened_buffers
4813 .get(&envelope.payload.buffer_id)
4814 .and_then(|buffer| buffer.upgrade(cx));
4815 if let Some(buffer) = buffer {
4816 buffer.update(cx, |buffer, cx| {
4817 buffer.did_save(version, mtime, None, cx);
4818 });
4819 }
4820 Ok(())
4821 })
4822 }
4823
4824 async fn handle_buffer_reloaded(
4825 this: ModelHandle<Self>,
4826 envelope: TypedEnvelope<proto::BufferReloaded>,
4827 _: Arc<Client>,
4828 mut cx: AsyncAppContext,
4829 ) -> Result<()> {
4830 let payload = envelope.payload.clone();
4831 let version = deserialize_version(payload.version);
4832 let mtime = payload
4833 .mtime
4834 .ok_or_else(|| anyhow!("missing mtime"))?
4835 .into();
4836 this.update(&mut cx, |this, cx| {
4837 let buffer = this
4838 .opened_buffers
4839 .get(&payload.buffer_id)
4840 .and_then(|buffer| buffer.upgrade(cx));
4841 if let Some(buffer) = buffer {
4842 buffer.update(cx, |buffer, cx| {
4843 buffer.did_reload(version, mtime, cx);
4844 });
4845 }
4846 Ok(())
4847 })
4848 }
4849
4850 pub fn match_paths<'a>(
4851 &self,
4852 query: &'a str,
4853 include_ignored: bool,
4854 smart_case: bool,
4855 max_results: usize,
4856 cancel_flag: &'a AtomicBool,
4857 cx: &AppContext,
4858 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4859 let worktrees = self
4860 .worktrees(cx)
4861 .filter(|worktree| worktree.read(cx).is_visible())
4862 .collect::<Vec<_>>();
4863 let include_root_name = worktrees.len() > 1;
4864 let candidate_sets = worktrees
4865 .into_iter()
4866 .map(|worktree| CandidateSet {
4867 snapshot: worktree.read(cx).snapshot(),
4868 include_ignored,
4869 include_root_name,
4870 })
4871 .collect::<Vec<_>>();
4872
4873 let background = cx.background().clone();
4874 async move {
4875 fuzzy::match_paths(
4876 candidate_sets.as_slice(),
4877 query,
4878 smart_case,
4879 max_results,
4880 cancel_flag,
4881 background,
4882 )
4883 .await
4884 }
4885 }
4886
4887 fn edits_from_lsp(
4888 &mut self,
4889 buffer: &ModelHandle<Buffer>,
4890 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4891 version: Option<i32>,
4892 cx: &mut ModelContext<Self>,
4893 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4894 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4895 cx.background().spawn(async move {
4896 let snapshot = snapshot?;
4897 let mut lsp_edits = lsp_edits
4898 .into_iter()
4899 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4900 .peekable();
4901
4902 let mut edits = Vec::new();
4903 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4904 // Combine any LSP edits that are adjacent.
4905 //
4906 // Also, combine LSP edits that are separated from each other by only
4907 // a newline. This is important because for some code actions,
4908 // Rust-analyzer rewrites the entire buffer via a series of edits that
4909 // are separated by unchanged newline characters.
4910 //
4911 // In order for the diffing logic below to work properly, any edits that
4912 // cancel each other out must be combined into one.
4913 while let Some((next_range, next_text)) = lsp_edits.peek() {
4914 if next_range.start > range.end {
4915 if next_range.start.row > range.end.row + 1
4916 || next_range.start.column > 0
4917 || snapshot.clip_point_utf16(
4918 PointUtf16::new(range.end.row, u32::MAX),
4919 Bias::Left,
4920 ) > range.end
4921 {
4922 break;
4923 }
4924 new_text.push('\n');
4925 }
4926 range.end = next_range.end;
4927 new_text.push_str(&next_text);
4928 lsp_edits.next();
4929 }
4930
4931 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4932 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4933 {
4934 return Err(anyhow!("invalid edits received from language server"));
4935 }
4936
4937 // For multiline edits, perform a diff of the old and new text so that
4938 // we can identify the changes more precisely, preserving the locations
4939 // of any anchors positioned in the unchanged regions.
4940 if range.end.row > range.start.row {
4941 let mut offset = range.start.to_offset(&snapshot);
4942 let old_text = snapshot.text_for_range(range).collect::<String>();
4943
4944 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4945 let mut moved_since_edit = true;
4946 for change in diff.iter_all_changes() {
4947 let tag = change.tag();
4948 let value = change.value();
4949 match tag {
4950 ChangeTag::Equal => {
4951 offset += value.len();
4952 moved_since_edit = true;
4953 }
4954 ChangeTag::Delete => {
4955 let start = snapshot.anchor_after(offset);
4956 let end = snapshot.anchor_before(offset + value.len());
4957 if moved_since_edit {
4958 edits.push((start..end, String::new()));
4959 } else {
4960 edits.last_mut().unwrap().0.end = end;
4961 }
4962 offset += value.len();
4963 moved_since_edit = false;
4964 }
4965 ChangeTag::Insert => {
4966 if moved_since_edit {
4967 let anchor = snapshot.anchor_after(offset);
4968 edits.push((anchor.clone()..anchor, value.to_string()));
4969 } else {
4970 edits.last_mut().unwrap().1.push_str(value);
4971 }
4972 moved_since_edit = false;
4973 }
4974 }
4975 }
4976 } else if range.end == range.start {
4977 let anchor = snapshot.anchor_after(range.start);
4978 edits.push((anchor.clone()..anchor, new_text));
4979 } else {
4980 let edit_start = snapshot.anchor_after(range.start);
4981 let edit_end = snapshot.anchor_before(range.end);
4982 edits.push((edit_start..edit_end, new_text));
4983 }
4984 }
4985
4986 Ok(edits)
4987 })
4988 }
4989
4990 fn buffer_snapshot_for_lsp_version(
4991 &mut self,
4992 buffer: &ModelHandle<Buffer>,
4993 version: Option<i32>,
4994 cx: &AppContext,
4995 ) -> Result<TextBufferSnapshot> {
4996 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4997
4998 if let Some(version) = version {
4999 let buffer_id = buffer.read(cx).remote_id();
5000 let snapshots = self
5001 .buffer_snapshots
5002 .get_mut(&buffer_id)
5003 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5004 let mut found_snapshot = None;
5005 snapshots.retain(|(snapshot_version, snapshot)| {
5006 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5007 false
5008 } else {
5009 if *snapshot_version == version {
5010 found_snapshot = Some(snapshot.clone());
5011 }
5012 true
5013 }
5014 });
5015
5016 found_snapshot.ok_or_else(|| {
5017 anyhow!(
5018 "snapshot not found for buffer {} at version {}",
5019 buffer_id,
5020 version
5021 )
5022 })
5023 } else {
5024 Ok((buffer.read(cx)).text_snapshot())
5025 }
5026 }
5027
5028 fn language_server_for_buffer(
5029 &self,
5030 buffer: &Buffer,
5031 cx: &AppContext,
5032 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5033 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5034 let worktree_id = file.worktree_id(cx);
5035 self.language_servers
5036 .get(&(worktree_id, language.lsp_adapter()?.name()))
5037 } else {
5038 None
5039 }
5040 }
5041}
5042
5043impl WorktreeHandle {
5044 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5045 match self {
5046 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5047 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5048 }
5049 }
5050}
5051
5052impl OpenBuffer {
5053 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5054 match self {
5055 OpenBuffer::Strong(handle) => Some(handle.clone()),
5056 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5057 OpenBuffer::Loading(_) => None,
5058 }
5059 }
5060}
5061
5062struct CandidateSet {
5063 snapshot: Snapshot,
5064 include_ignored: bool,
5065 include_root_name: bool,
5066}
5067
5068impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5069 type Candidates = CandidateSetIter<'a>;
5070
5071 fn id(&self) -> usize {
5072 self.snapshot.id().to_usize()
5073 }
5074
5075 fn len(&self) -> usize {
5076 if self.include_ignored {
5077 self.snapshot.file_count()
5078 } else {
5079 self.snapshot.visible_file_count()
5080 }
5081 }
5082
5083 fn prefix(&self) -> Arc<str> {
5084 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5085 self.snapshot.root_name().into()
5086 } else if self.include_root_name {
5087 format!("{}/", self.snapshot.root_name()).into()
5088 } else {
5089 "".into()
5090 }
5091 }
5092
5093 fn candidates(&'a self, start: usize) -> Self::Candidates {
5094 CandidateSetIter {
5095 traversal: self.snapshot.files(self.include_ignored, start),
5096 }
5097 }
5098}
5099
5100struct CandidateSetIter<'a> {
5101 traversal: Traversal<'a>,
5102}
5103
5104impl<'a> Iterator for CandidateSetIter<'a> {
5105 type Item = PathMatchCandidate<'a>;
5106
5107 fn next(&mut self) -> Option<Self::Item> {
5108 self.traversal.next().map(|entry| {
5109 if let EntryKind::File(char_bag) = entry.kind {
5110 PathMatchCandidate {
5111 path: &entry.path,
5112 char_bag,
5113 }
5114 } else {
5115 unreachable!()
5116 }
5117 })
5118 }
5119}
5120
5121impl Entity for Project {
5122 type Event = Event;
5123
5124 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5125 match &self.client_state {
5126 ProjectClientState::Local { remote_id_rx, .. } => {
5127 if let Some(project_id) = *remote_id_rx.borrow() {
5128 self.client
5129 .send(proto::UnregisterProject { project_id })
5130 .log_err();
5131 }
5132 }
5133 ProjectClientState::Remote { remote_id, .. } => {
5134 self.client
5135 .send(proto::LeaveProject {
5136 project_id: *remote_id,
5137 })
5138 .log_err();
5139 }
5140 }
5141 }
5142
5143 fn app_will_quit(
5144 &mut self,
5145 _: &mut MutableAppContext,
5146 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5147 let shutdown_futures = self
5148 .language_servers
5149 .drain()
5150 .filter_map(|(_, (_, server))| server.shutdown())
5151 .collect::<Vec<_>>();
5152 Some(
5153 async move {
5154 futures::future::join_all(shutdown_futures).await;
5155 }
5156 .boxed(),
5157 )
5158 }
5159}
5160
5161impl Collaborator {
5162 fn from_proto(
5163 message: proto::Collaborator,
5164 user_store: &ModelHandle<UserStore>,
5165 cx: &mut AsyncAppContext,
5166 ) -> impl Future<Output = Result<Self>> {
5167 let user = user_store.update(cx, |user_store, cx| {
5168 user_store.fetch_user(message.user_id, cx)
5169 });
5170
5171 async move {
5172 Ok(Self {
5173 peer_id: PeerId(message.peer_id),
5174 user: user.await?,
5175 replica_id: message.replica_id as ReplicaId,
5176 })
5177 }
5178 }
5179}
5180
5181impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5182 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5183 Self {
5184 worktree_id,
5185 path: path.as_ref().into(),
5186 }
5187 }
5188}
5189
5190impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5191 fn from(options: lsp::CreateFileOptions) -> Self {
5192 Self {
5193 overwrite: options.overwrite.unwrap_or(false),
5194 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5195 }
5196 }
5197}
5198
5199impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5200 fn from(options: lsp::RenameFileOptions) -> Self {
5201 Self {
5202 overwrite: options.overwrite.unwrap_or(false),
5203 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5204 }
5205 }
5206}
5207
5208impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5209 fn from(options: lsp::DeleteFileOptions) -> Self {
5210 Self {
5211 recursive: options.recursive.unwrap_or(false),
5212 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5213 }
5214 }
5215}
5216
5217fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5218 proto::Symbol {
5219 source_worktree_id: symbol.source_worktree_id.to_proto(),
5220 worktree_id: symbol.worktree_id.to_proto(),
5221 language_server_name: symbol.language_server_name.0.to_string(),
5222 name: symbol.name.clone(),
5223 kind: unsafe { mem::transmute(symbol.kind) },
5224 path: symbol.path.to_string_lossy().to_string(),
5225 start: Some(proto::Point {
5226 row: symbol.range.start.row,
5227 column: symbol.range.start.column,
5228 }),
5229 end: Some(proto::Point {
5230 row: symbol.range.end.row,
5231 column: symbol.range.end.column,
5232 }),
5233 signature: symbol.signature.to_vec(),
5234 }
5235}
5236
5237fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5238 let mut path_components = path.components();
5239 let mut base_components = base.components();
5240 let mut components: Vec<Component> = Vec::new();
5241 loop {
5242 match (path_components.next(), base_components.next()) {
5243 (None, None) => break,
5244 (Some(a), None) => {
5245 components.push(a);
5246 components.extend(path_components.by_ref());
5247 break;
5248 }
5249 (None, _) => components.push(Component::ParentDir),
5250 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5251 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5252 (Some(a), Some(_)) => {
5253 components.push(Component::ParentDir);
5254 for _ in base_components {
5255 components.push(Component::ParentDir);
5256 }
5257 components.push(a);
5258 components.extend(path_components.by_ref());
5259 break;
5260 }
5261 }
5262 }
5263 components.iter().map(|c| c.as_os_str()).collect()
5264}
5265
5266impl Item for Buffer {
5267 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5268 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5269 }
5270}
5271
5272#[cfg(test)]
5273mod tests {
5274 use crate::worktree::WorktreeHandle;
5275
5276 use super::{Event, *};
5277 use fs::RealFs;
5278 use futures::{future, StreamExt};
5279 use gpui::test::subscribe;
5280 use language::{
5281 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5282 OffsetRangeExt, Point, ToPoint,
5283 };
5284 use lsp::Url;
5285 use serde_json::json;
5286 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5287 use unindent::Unindent as _;
5288 use util::{assert_set_eq, test::temp_tree};
5289
5290 #[gpui::test]
5291 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5292 let dir = temp_tree(json!({
5293 "root": {
5294 "apple": "",
5295 "banana": {
5296 "carrot": {
5297 "date": "",
5298 "endive": "",
5299 }
5300 },
5301 "fennel": {
5302 "grape": "",
5303 }
5304 }
5305 }));
5306
5307 let root_link_path = dir.path().join("root_link");
5308 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5309 unix::fs::symlink(
5310 &dir.path().join("root/fennel"),
5311 &dir.path().join("root/finnochio"),
5312 )
5313 .unwrap();
5314
5315 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5316
5317 project.read_with(cx, |project, cx| {
5318 let tree = project.worktrees(cx).next().unwrap().read(cx);
5319 assert_eq!(tree.file_count(), 5);
5320 assert_eq!(
5321 tree.inode_for_path("fennel/grape"),
5322 tree.inode_for_path("finnochio/grape")
5323 );
5324 });
5325
5326 let cancel_flag = Default::default();
5327 let results = project
5328 .read_with(cx, |project, cx| {
5329 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5330 })
5331 .await;
5332 assert_eq!(
5333 results
5334 .into_iter()
5335 .map(|result| result.path)
5336 .collect::<Vec<Arc<Path>>>(),
5337 vec![
5338 PathBuf::from("banana/carrot/date").into(),
5339 PathBuf::from("banana/carrot/endive").into(),
5340 ]
5341 );
5342 }
5343
5344 #[gpui::test]
5345 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5346 cx.foreground().forbid_parking();
5347
5348 let mut rust_language = Language::new(
5349 LanguageConfig {
5350 name: "Rust".into(),
5351 path_suffixes: vec!["rs".to_string()],
5352 ..Default::default()
5353 },
5354 Some(tree_sitter_rust::language()),
5355 );
5356 let mut json_language = Language::new(
5357 LanguageConfig {
5358 name: "JSON".into(),
5359 path_suffixes: vec!["json".to_string()],
5360 ..Default::default()
5361 },
5362 None,
5363 );
5364 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5365 name: "the-rust-language-server",
5366 capabilities: lsp::ServerCapabilities {
5367 completion_provider: Some(lsp::CompletionOptions {
5368 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5369 ..Default::default()
5370 }),
5371 ..Default::default()
5372 },
5373 ..Default::default()
5374 });
5375 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5376 name: "the-json-language-server",
5377 capabilities: lsp::ServerCapabilities {
5378 completion_provider: Some(lsp::CompletionOptions {
5379 trigger_characters: Some(vec![":".to_string()]),
5380 ..Default::default()
5381 }),
5382 ..Default::default()
5383 },
5384 ..Default::default()
5385 });
5386
5387 let fs = FakeFs::new(cx.background());
5388 fs.insert_tree(
5389 "/the-root",
5390 json!({
5391 "test.rs": "const A: i32 = 1;",
5392 "test2.rs": "",
5393 "Cargo.toml": "a = 1",
5394 "package.json": "{\"a\": 1}",
5395 }),
5396 )
5397 .await;
5398
5399 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5400 project.update(cx, |project, _| {
5401 project.languages.add(Arc::new(rust_language));
5402 project.languages.add(Arc::new(json_language));
5403 });
5404
5405 // Open a buffer without an associated language server.
5406 let toml_buffer = project
5407 .update(cx, |project, cx| {
5408 project.open_local_buffer("/the-root/Cargo.toml", cx)
5409 })
5410 .await
5411 .unwrap();
5412
5413 // Open a buffer with an associated language server.
5414 let rust_buffer = project
5415 .update(cx, |project, cx| {
5416 project.open_local_buffer("/the-root/test.rs", cx)
5417 })
5418 .await
5419 .unwrap();
5420
5421 // A server is started up, and it is notified about Rust files.
5422 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5423 assert_eq!(
5424 fake_rust_server
5425 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5426 .await
5427 .text_document,
5428 lsp::TextDocumentItem {
5429 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5430 version: 0,
5431 text: "const A: i32 = 1;".to_string(),
5432 language_id: Default::default()
5433 }
5434 );
5435
5436 // The buffer is configured based on the language server's capabilities.
5437 rust_buffer.read_with(cx, |buffer, _| {
5438 assert_eq!(
5439 buffer.completion_triggers(),
5440 &[".".to_string(), "::".to_string()]
5441 );
5442 });
5443 toml_buffer.read_with(cx, |buffer, _| {
5444 assert!(buffer.completion_triggers().is_empty());
5445 });
5446
5447 // Edit a buffer. The changes are reported to the language server.
5448 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5449 assert_eq!(
5450 fake_rust_server
5451 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5452 .await
5453 .text_document,
5454 lsp::VersionedTextDocumentIdentifier::new(
5455 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5456 1
5457 )
5458 );
5459
5460 // Open a third buffer with a different associated language server.
5461 let json_buffer = project
5462 .update(cx, |project, cx| {
5463 project.open_local_buffer("/the-root/package.json", cx)
5464 })
5465 .await
5466 .unwrap();
5467
5468 // A json language server is started up and is only notified about the json buffer.
5469 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5470 assert_eq!(
5471 fake_json_server
5472 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5473 .await
5474 .text_document,
5475 lsp::TextDocumentItem {
5476 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5477 version: 0,
5478 text: "{\"a\": 1}".to_string(),
5479 language_id: Default::default()
5480 }
5481 );
5482
5483 // This buffer is configured based on the second language server's
5484 // capabilities.
5485 json_buffer.read_with(cx, |buffer, _| {
5486 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5487 });
5488
5489 // When opening another buffer whose language server is already running,
5490 // it is also configured based on the existing language server's capabilities.
5491 let rust_buffer2 = project
5492 .update(cx, |project, cx| {
5493 project.open_local_buffer("/the-root/test2.rs", cx)
5494 })
5495 .await
5496 .unwrap();
5497 rust_buffer2.read_with(cx, |buffer, _| {
5498 assert_eq!(
5499 buffer.completion_triggers(),
5500 &[".".to_string(), "::".to_string()]
5501 );
5502 });
5503
5504 // Changes are reported only to servers matching the buffer's language.
5505 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5506 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5507 assert_eq!(
5508 fake_rust_server
5509 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5510 .await
5511 .text_document,
5512 lsp::VersionedTextDocumentIdentifier::new(
5513 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5514 1
5515 )
5516 );
5517
5518 // Save notifications are reported to all servers.
5519 toml_buffer
5520 .update(cx, |buffer, cx| buffer.save(cx))
5521 .await
5522 .unwrap();
5523 assert_eq!(
5524 fake_rust_server
5525 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5526 .await
5527 .text_document,
5528 lsp::TextDocumentIdentifier::new(
5529 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5530 )
5531 );
5532 assert_eq!(
5533 fake_json_server
5534 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5535 .await
5536 .text_document,
5537 lsp::TextDocumentIdentifier::new(
5538 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5539 )
5540 );
5541
5542 // Renames are reported only to servers matching the buffer's language.
5543 fs.rename(
5544 Path::new("/the-root/test2.rs"),
5545 Path::new("/the-root/test3.rs"),
5546 Default::default(),
5547 )
5548 .await
5549 .unwrap();
5550 assert_eq!(
5551 fake_rust_server
5552 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5553 .await
5554 .text_document,
5555 lsp::TextDocumentIdentifier::new(
5556 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5557 ),
5558 );
5559 assert_eq!(
5560 fake_rust_server
5561 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5562 .await
5563 .text_document,
5564 lsp::TextDocumentItem {
5565 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5566 version: 0,
5567 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5568 language_id: Default::default()
5569 },
5570 );
5571
5572 rust_buffer2.update(cx, |buffer, cx| {
5573 buffer.update_diagnostics(
5574 DiagnosticSet::from_sorted_entries(
5575 vec![DiagnosticEntry {
5576 diagnostic: Default::default(),
5577 range: Anchor::MIN..Anchor::MAX,
5578 }],
5579 &buffer.snapshot(),
5580 ),
5581 cx,
5582 );
5583 assert_eq!(
5584 buffer
5585 .snapshot()
5586 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5587 .count(),
5588 1
5589 );
5590 });
5591
5592 // When the rename changes the extension of the file, the buffer gets closed on the old
5593 // language server and gets opened on the new one.
5594 fs.rename(
5595 Path::new("/the-root/test3.rs"),
5596 Path::new("/the-root/test3.json"),
5597 Default::default(),
5598 )
5599 .await
5600 .unwrap();
5601 assert_eq!(
5602 fake_rust_server
5603 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5604 .await
5605 .text_document,
5606 lsp::TextDocumentIdentifier::new(
5607 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5608 ),
5609 );
5610 assert_eq!(
5611 fake_json_server
5612 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5613 .await
5614 .text_document,
5615 lsp::TextDocumentItem {
5616 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5617 version: 0,
5618 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5619 language_id: Default::default()
5620 },
5621 );
5622
5623 // We clear the diagnostics, since the language has changed.
5624 rust_buffer2.read_with(cx, |buffer, _| {
5625 assert_eq!(
5626 buffer
5627 .snapshot()
5628 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5629 .count(),
5630 0
5631 );
5632 });
5633
5634 // The renamed file's version resets after changing language server.
5635 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5636 assert_eq!(
5637 fake_json_server
5638 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5639 .await
5640 .text_document,
5641 lsp::VersionedTextDocumentIdentifier::new(
5642 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5643 1
5644 )
5645 );
5646
5647 // Restart language servers
5648 project.update(cx, |project, cx| {
5649 project.restart_language_servers_for_buffers(
5650 vec![rust_buffer.clone(), json_buffer.clone()],
5651 cx,
5652 );
5653 });
5654
5655 let mut rust_shutdown_requests = fake_rust_server
5656 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5657 let mut json_shutdown_requests = fake_json_server
5658 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5659 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5660
5661 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5662 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5663
5664 // Ensure rust document is reopened in new rust language server
5665 assert_eq!(
5666 fake_rust_server
5667 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5668 .await
5669 .text_document,
5670 lsp::TextDocumentItem {
5671 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5672 version: 1,
5673 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5674 language_id: Default::default()
5675 }
5676 );
5677
5678 // Ensure json documents are reopened in new json language server
5679 assert_set_eq!(
5680 [
5681 fake_json_server
5682 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5683 .await
5684 .text_document,
5685 fake_json_server
5686 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5687 .await
5688 .text_document,
5689 ],
5690 [
5691 lsp::TextDocumentItem {
5692 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5693 version: 0,
5694 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5695 language_id: Default::default()
5696 },
5697 lsp::TextDocumentItem {
5698 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5699 version: 1,
5700 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5701 language_id: Default::default()
5702 }
5703 ]
5704 );
5705
5706 // Close notifications are reported only to servers matching the buffer's language.
5707 cx.update(|_| drop(json_buffer));
5708 let close_message = lsp::DidCloseTextDocumentParams {
5709 text_document: lsp::TextDocumentIdentifier::new(
5710 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5711 ),
5712 };
5713 assert_eq!(
5714 fake_json_server
5715 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5716 .await,
5717 close_message,
5718 );
5719 }
5720
5721 #[gpui::test]
5722 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5723 cx.foreground().forbid_parking();
5724
5725 let fs = FakeFs::new(cx.background());
5726 fs.insert_tree(
5727 "/dir",
5728 json!({
5729 "a.rs": "let a = 1;",
5730 "b.rs": "let b = 2;"
5731 }),
5732 )
5733 .await;
5734
5735 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5736
5737 let buffer_a = project
5738 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5739 .await
5740 .unwrap();
5741 let buffer_b = project
5742 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5743 .await
5744 .unwrap();
5745
5746 project.update(cx, |project, cx| {
5747 project
5748 .update_diagnostics(
5749 lsp::PublishDiagnosticsParams {
5750 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5751 version: None,
5752 diagnostics: vec![lsp::Diagnostic {
5753 range: lsp::Range::new(
5754 lsp::Position::new(0, 4),
5755 lsp::Position::new(0, 5),
5756 ),
5757 severity: Some(lsp::DiagnosticSeverity::ERROR),
5758 message: "error 1".to_string(),
5759 ..Default::default()
5760 }],
5761 },
5762 &[],
5763 cx,
5764 )
5765 .unwrap();
5766 project
5767 .update_diagnostics(
5768 lsp::PublishDiagnosticsParams {
5769 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5770 version: None,
5771 diagnostics: vec![lsp::Diagnostic {
5772 range: lsp::Range::new(
5773 lsp::Position::new(0, 4),
5774 lsp::Position::new(0, 5),
5775 ),
5776 severity: Some(lsp::DiagnosticSeverity::WARNING),
5777 message: "error 2".to_string(),
5778 ..Default::default()
5779 }],
5780 },
5781 &[],
5782 cx,
5783 )
5784 .unwrap();
5785 });
5786
5787 buffer_a.read_with(cx, |buffer, _| {
5788 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5789 assert_eq!(
5790 chunks
5791 .iter()
5792 .map(|(s, d)| (s.as_str(), *d))
5793 .collect::<Vec<_>>(),
5794 &[
5795 ("let ", None),
5796 ("a", Some(DiagnosticSeverity::ERROR)),
5797 (" = 1;", None),
5798 ]
5799 );
5800 });
5801 buffer_b.read_with(cx, |buffer, _| {
5802 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5803 assert_eq!(
5804 chunks
5805 .iter()
5806 .map(|(s, d)| (s.as_str(), *d))
5807 .collect::<Vec<_>>(),
5808 &[
5809 ("let ", None),
5810 ("b", Some(DiagnosticSeverity::WARNING)),
5811 (" = 2;", None),
5812 ]
5813 );
5814 });
5815 }
5816
5817 #[gpui::test]
5818 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5819 cx.foreground().forbid_parking();
5820
5821 let progress_token = "the-progress-token";
5822 let mut language = Language::new(
5823 LanguageConfig {
5824 name: "Rust".into(),
5825 path_suffixes: vec!["rs".to_string()],
5826 ..Default::default()
5827 },
5828 Some(tree_sitter_rust::language()),
5829 );
5830 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5831 disk_based_diagnostics_progress_token: Some(progress_token),
5832 disk_based_diagnostics_sources: &["disk"],
5833 ..Default::default()
5834 });
5835
5836 let fs = FakeFs::new(cx.background());
5837 fs.insert_tree(
5838 "/dir",
5839 json!({
5840 "a.rs": "fn a() { A }",
5841 "b.rs": "const y: i32 = 1",
5842 }),
5843 )
5844 .await;
5845
5846 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5847 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5848 let worktree_id =
5849 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5850
5851 // Cause worktree to start the fake language server
5852 let _buffer = project
5853 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5854 .await
5855 .unwrap();
5856
5857 let mut events = subscribe(&project, cx);
5858
5859 let mut fake_server = fake_servers.next().await.unwrap();
5860 fake_server.start_progress(progress_token).await;
5861 assert_eq!(
5862 events.next().await.unwrap(),
5863 Event::DiskBasedDiagnosticsStarted
5864 );
5865
5866 fake_server.start_progress(progress_token).await;
5867 fake_server.end_progress(progress_token).await;
5868 fake_server.start_progress(progress_token).await;
5869
5870 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5871 lsp::PublishDiagnosticsParams {
5872 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5873 version: None,
5874 diagnostics: vec![lsp::Diagnostic {
5875 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5876 severity: Some(lsp::DiagnosticSeverity::ERROR),
5877 message: "undefined variable 'A'".to_string(),
5878 ..Default::default()
5879 }],
5880 },
5881 );
5882 assert_eq!(
5883 events.next().await.unwrap(),
5884 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5885 );
5886
5887 fake_server.end_progress(progress_token).await;
5888 fake_server.end_progress(progress_token).await;
5889 assert_eq!(
5890 events.next().await.unwrap(),
5891 Event::DiskBasedDiagnosticsUpdated
5892 );
5893 assert_eq!(
5894 events.next().await.unwrap(),
5895 Event::DiskBasedDiagnosticsFinished
5896 );
5897
5898 let buffer = project
5899 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5900 .await
5901 .unwrap();
5902
5903 buffer.read_with(cx, |buffer, _| {
5904 let snapshot = buffer.snapshot();
5905 let diagnostics = snapshot
5906 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5907 .collect::<Vec<_>>();
5908 assert_eq!(
5909 diagnostics,
5910 &[DiagnosticEntry {
5911 range: Point::new(0, 9)..Point::new(0, 10),
5912 diagnostic: Diagnostic {
5913 severity: lsp::DiagnosticSeverity::ERROR,
5914 message: "undefined variable 'A'".to_string(),
5915 group_id: 0,
5916 is_primary: true,
5917 ..Default::default()
5918 }
5919 }]
5920 )
5921 });
5922
5923 // Ensure publishing empty diagnostics twice only results in one update event.
5924 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5925 lsp::PublishDiagnosticsParams {
5926 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5927 version: None,
5928 diagnostics: Default::default(),
5929 },
5930 );
5931 assert_eq!(
5932 events.next().await.unwrap(),
5933 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5934 );
5935
5936 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5937 lsp::PublishDiagnosticsParams {
5938 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5939 version: None,
5940 diagnostics: Default::default(),
5941 },
5942 );
5943 cx.foreground().run_until_parked();
5944 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5945 }
5946
5947 #[gpui::test]
5948 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5949 cx.foreground().forbid_parking();
5950
5951 let progress_token = "the-progress-token";
5952 let mut language = Language::new(
5953 LanguageConfig {
5954 path_suffixes: vec!["rs".to_string()],
5955 ..Default::default()
5956 },
5957 None,
5958 );
5959 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5960 disk_based_diagnostics_sources: &["disk"],
5961 disk_based_diagnostics_progress_token: Some(progress_token),
5962 ..Default::default()
5963 });
5964
5965 let fs = FakeFs::new(cx.background());
5966 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5967
5968 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5969 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5970
5971 let buffer = project
5972 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5973 .await
5974 .unwrap();
5975
5976 // Simulate diagnostics starting to update.
5977 let mut fake_server = fake_servers.next().await.unwrap();
5978 fake_server.start_progress(progress_token).await;
5979
5980 // Restart the server before the diagnostics finish updating.
5981 project.update(cx, |project, cx| {
5982 project.restart_language_servers_for_buffers([buffer], cx);
5983 });
5984 let mut events = subscribe(&project, cx);
5985
5986 // Simulate the newly started server sending more diagnostics.
5987 let mut fake_server = fake_servers.next().await.unwrap();
5988 fake_server.start_progress(progress_token).await;
5989 assert_eq!(
5990 events.next().await.unwrap(),
5991 Event::DiskBasedDiagnosticsStarted
5992 );
5993
5994 // All diagnostics are considered done, despite the old server's diagnostic
5995 // task never completing.
5996 fake_server.end_progress(progress_token).await;
5997 assert_eq!(
5998 events.next().await.unwrap(),
5999 Event::DiskBasedDiagnosticsUpdated
6000 );
6001 assert_eq!(
6002 events.next().await.unwrap(),
6003 Event::DiskBasedDiagnosticsFinished
6004 );
6005 project.read_with(cx, |project, _| {
6006 assert!(!project.is_running_disk_based_diagnostics());
6007 });
6008 }
6009
6010 #[gpui::test]
6011 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6012 cx.foreground().forbid_parking();
6013
6014 let mut language = Language::new(
6015 LanguageConfig {
6016 name: "Rust".into(),
6017 path_suffixes: vec!["rs".to_string()],
6018 ..Default::default()
6019 },
6020 Some(tree_sitter_rust::language()),
6021 );
6022 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6023 disk_based_diagnostics_sources: &["disk"],
6024 ..Default::default()
6025 });
6026
6027 let text = "
6028 fn a() { A }
6029 fn b() { BB }
6030 fn c() { CCC }
6031 "
6032 .unindent();
6033
6034 let fs = FakeFs::new(cx.background());
6035 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6036
6037 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6038 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6039
6040 let buffer = project
6041 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6042 .await
6043 .unwrap();
6044
6045 let mut fake_server = fake_servers.next().await.unwrap();
6046 let open_notification = fake_server
6047 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6048 .await;
6049
6050 // Edit the buffer, moving the content down
6051 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6052 let change_notification_1 = fake_server
6053 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6054 .await;
6055 assert!(
6056 change_notification_1.text_document.version > open_notification.text_document.version
6057 );
6058
6059 // Report some diagnostics for the initial version of the buffer
6060 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6061 lsp::PublishDiagnosticsParams {
6062 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6063 version: Some(open_notification.text_document.version),
6064 diagnostics: vec![
6065 lsp::Diagnostic {
6066 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6067 severity: Some(DiagnosticSeverity::ERROR),
6068 message: "undefined variable 'A'".to_string(),
6069 source: Some("disk".to_string()),
6070 ..Default::default()
6071 },
6072 lsp::Diagnostic {
6073 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6074 severity: Some(DiagnosticSeverity::ERROR),
6075 message: "undefined variable 'BB'".to_string(),
6076 source: Some("disk".to_string()),
6077 ..Default::default()
6078 },
6079 lsp::Diagnostic {
6080 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6081 severity: Some(DiagnosticSeverity::ERROR),
6082 source: Some("disk".to_string()),
6083 message: "undefined variable 'CCC'".to_string(),
6084 ..Default::default()
6085 },
6086 ],
6087 },
6088 );
6089
6090 // The diagnostics have moved down since they were created.
6091 buffer.next_notification(cx).await;
6092 buffer.read_with(cx, |buffer, _| {
6093 assert_eq!(
6094 buffer
6095 .snapshot()
6096 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6097 .collect::<Vec<_>>(),
6098 &[
6099 DiagnosticEntry {
6100 range: Point::new(3, 9)..Point::new(3, 11),
6101 diagnostic: Diagnostic {
6102 severity: DiagnosticSeverity::ERROR,
6103 message: "undefined variable 'BB'".to_string(),
6104 is_disk_based: true,
6105 group_id: 1,
6106 is_primary: true,
6107 ..Default::default()
6108 },
6109 },
6110 DiagnosticEntry {
6111 range: Point::new(4, 9)..Point::new(4, 12),
6112 diagnostic: Diagnostic {
6113 severity: DiagnosticSeverity::ERROR,
6114 message: "undefined variable 'CCC'".to_string(),
6115 is_disk_based: true,
6116 group_id: 2,
6117 is_primary: true,
6118 ..Default::default()
6119 }
6120 }
6121 ]
6122 );
6123 assert_eq!(
6124 chunks_with_diagnostics(buffer, 0..buffer.len()),
6125 [
6126 ("\n\nfn a() { ".to_string(), None),
6127 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6128 (" }\nfn b() { ".to_string(), None),
6129 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6130 (" }\nfn c() { ".to_string(), None),
6131 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6132 (" }\n".to_string(), None),
6133 ]
6134 );
6135 assert_eq!(
6136 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6137 [
6138 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6139 (" }\nfn c() { ".to_string(), None),
6140 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6141 ]
6142 );
6143 });
6144
6145 // Ensure overlapping diagnostics are highlighted correctly.
6146 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6147 lsp::PublishDiagnosticsParams {
6148 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6149 version: Some(open_notification.text_document.version),
6150 diagnostics: vec![
6151 lsp::Diagnostic {
6152 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6153 severity: Some(DiagnosticSeverity::ERROR),
6154 message: "undefined variable 'A'".to_string(),
6155 source: Some("disk".to_string()),
6156 ..Default::default()
6157 },
6158 lsp::Diagnostic {
6159 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6160 severity: Some(DiagnosticSeverity::WARNING),
6161 message: "unreachable statement".to_string(),
6162 source: Some("disk".to_string()),
6163 ..Default::default()
6164 },
6165 ],
6166 },
6167 );
6168
6169 buffer.next_notification(cx).await;
6170 buffer.read_with(cx, |buffer, _| {
6171 assert_eq!(
6172 buffer
6173 .snapshot()
6174 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6175 .collect::<Vec<_>>(),
6176 &[
6177 DiagnosticEntry {
6178 range: Point::new(2, 9)..Point::new(2, 12),
6179 diagnostic: Diagnostic {
6180 severity: DiagnosticSeverity::WARNING,
6181 message: "unreachable statement".to_string(),
6182 is_disk_based: true,
6183 group_id: 1,
6184 is_primary: true,
6185 ..Default::default()
6186 }
6187 },
6188 DiagnosticEntry {
6189 range: Point::new(2, 9)..Point::new(2, 10),
6190 diagnostic: Diagnostic {
6191 severity: DiagnosticSeverity::ERROR,
6192 message: "undefined variable 'A'".to_string(),
6193 is_disk_based: true,
6194 group_id: 0,
6195 is_primary: true,
6196 ..Default::default()
6197 },
6198 }
6199 ]
6200 );
6201 assert_eq!(
6202 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6203 [
6204 ("fn a() { ".to_string(), None),
6205 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6206 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6207 ("\n".to_string(), None),
6208 ]
6209 );
6210 assert_eq!(
6211 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6212 [
6213 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6214 ("\n".to_string(), None),
6215 ]
6216 );
6217 });
6218
6219 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6220 // changes since the last save.
6221 buffer.update(cx, |buffer, cx| {
6222 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6223 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6224 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6225 });
6226 let change_notification_2 = fake_server
6227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6228 .await;
6229 assert!(
6230 change_notification_2.text_document.version
6231 > change_notification_1.text_document.version
6232 );
6233
6234 // Handle out-of-order diagnostics
6235 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6236 lsp::PublishDiagnosticsParams {
6237 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6238 version: Some(change_notification_2.text_document.version),
6239 diagnostics: vec![
6240 lsp::Diagnostic {
6241 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6242 severity: Some(DiagnosticSeverity::ERROR),
6243 message: "undefined variable 'BB'".to_string(),
6244 source: Some("disk".to_string()),
6245 ..Default::default()
6246 },
6247 lsp::Diagnostic {
6248 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6249 severity: Some(DiagnosticSeverity::WARNING),
6250 message: "undefined variable 'A'".to_string(),
6251 source: Some("disk".to_string()),
6252 ..Default::default()
6253 },
6254 ],
6255 },
6256 );
6257
6258 buffer.next_notification(cx).await;
6259 buffer.read_with(cx, |buffer, _| {
6260 assert_eq!(
6261 buffer
6262 .snapshot()
6263 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6264 .collect::<Vec<_>>(),
6265 &[
6266 DiagnosticEntry {
6267 range: Point::new(2, 21)..Point::new(2, 22),
6268 diagnostic: Diagnostic {
6269 severity: DiagnosticSeverity::WARNING,
6270 message: "undefined variable 'A'".to_string(),
6271 is_disk_based: true,
6272 group_id: 1,
6273 is_primary: true,
6274 ..Default::default()
6275 }
6276 },
6277 DiagnosticEntry {
6278 range: Point::new(3, 9)..Point::new(3, 14),
6279 diagnostic: Diagnostic {
6280 severity: DiagnosticSeverity::ERROR,
6281 message: "undefined variable 'BB'".to_string(),
6282 is_disk_based: true,
6283 group_id: 0,
6284 is_primary: true,
6285 ..Default::default()
6286 },
6287 }
6288 ]
6289 );
6290 });
6291 }
6292
6293 #[gpui::test]
6294 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6295 cx.foreground().forbid_parking();
6296
6297 let text = concat!(
6298 "let one = ;\n", //
6299 "let two = \n",
6300 "let three = 3;\n",
6301 );
6302
6303 let fs = FakeFs::new(cx.background());
6304 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6305
6306 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6307 let buffer = project
6308 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6309 .await
6310 .unwrap();
6311
6312 project.update(cx, |project, cx| {
6313 project
6314 .update_buffer_diagnostics(
6315 &buffer,
6316 vec![
6317 DiagnosticEntry {
6318 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6319 diagnostic: Diagnostic {
6320 severity: DiagnosticSeverity::ERROR,
6321 message: "syntax error 1".to_string(),
6322 ..Default::default()
6323 },
6324 },
6325 DiagnosticEntry {
6326 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6327 diagnostic: Diagnostic {
6328 severity: DiagnosticSeverity::ERROR,
6329 message: "syntax error 2".to_string(),
6330 ..Default::default()
6331 },
6332 },
6333 ],
6334 None,
6335 cx,
6336 )
6337 .unwrap();
6338 });
6339
6340 // An empty range is extended forward to include the following character.
6341 // At the end of a line, an empty range is extended backward to include
6342 // the preceding character.
6343 buffer.read_with(cx, |buffer, _| {
6344 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6345 assert_eq!(
6346 chunks
6347 .iter()
6348 .map(|(s, d)| (s.as_str(), *d))
6349 .collect::<Vec<_>>(),
6350 &[
6351 ("let one = ", None),
6352 (";", Some(DiagnosticSeverity::ERROR)),
6353 ("\nlet two =", None),
6354 (" ", Some(DiagnosticSeverity::ERROR)),
6355 ("\nlet three = 3;\n", None)
6356 ]
6357 );
6358 });
6359 }
6360
6361 #[gpui::test]
6362 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6363 cx.foreground().forbid_parking();
6364
6365 let mut language = Language::new(
6366 LanguageConfig {
6367 name: "Rust".into(),
6368 path_suffixes: vec!["rs".to_string()],
6369 ..Default::default()
6370 },
6371 Some(tree_sitter_rust::language()),
6372 );
6373 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6374
6375 let text = "
6376 fn a() {
6377 f1();
6378 }
6379 fn b() {
6380 f2();
6381 }
6382 fn c() {
6383 f3();
6384 }
6385 "
6386 .unindent();
6387
6388 let fs = FakeFs::new(cx.background());
6389 fs.insert_tree(
6390 "/dir",
6391 json!({
6392 "a.rs": text.clone(),
6393 }),
6394 )
6395 .await;
6396
6397 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6398 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6399 let buffer = project
6400 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6401 .await
6402 .unwrap();
6403
6404 let mut fake_server = fake_servers.next().await.unwrap();
6405 let lsp_document_version = fake_server
6406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6407 .await
6408 .text_document
6409 .version;
6410
6411 // Simulate editing the buffer after the language server computes some edits.
6412 buffer.update(cx, |buffer, cx| {
6413 buffer.edit(
6414 [(
6415 Point::new(0, 0)..Point::new(0, 0),
6416 "// above first function\n",
6417 )],
6418 cx,
6419 );
6420 buffer.edit(
6421 [(
6422 Point::new(2, 0)..Point::new(2, 0),
6423 " // inside first function\n",
6424 )],
6425 cx,
6426 );
6427 buffer.edit(
6428 [(
6429 Point::new(6, 4)..Point::new(6, 4),
6430 "// inside second function ",
6431 )],
6432 cx,
6433 );
6434
6435 assert_eq!(
6436 buffer.text(),
6437 "
6438 // above first function
6439 fn a() {
6440 // inside first function
6441 f1();
6442 }
6443 fn b() {
6444 // inside second function f2();
6445 }
6446 fn c() {
6447 f3();
6448 }
6449 "
6450 .unindent()
6451 );
6452 });
6453
6454 let edits = project
6455 .update(cx, |project, cx| {
6456 project.edits_from_lsp(
6457 &buffer,
6458 vec![
6459 // replace body of first function
6460 lsp::TextEdit {
6461 range: lsp::Range::new(
6462 lsp::Position::new(0, 0),
6463 lsp::Position::new(3, 0),
6464 ),
6465 new_text: "
6466 fn a() {
6467 f10();
6468 }
6469 "
6470 .unindent(),
6471 },
6472 // edit inside second function
6473 lsp::TextEdit {
6474 range: lsp::Range::new(
6475 lsp::Position::new(4, 6),
6476 lsp::Position::new(4, 6),
6477 ),
6478 new_text: "00".into(),
6479 },
6480 // edit inside third function via two distinct edits
6481 lsp::TextEdit {
6482 range: lsp::Range::new(
6483 lsp::Position::new(7, 5),
6484 lsp::Position::new(7, 5),
6485 ),
6486 new_text: "4000".into(),
6487 },
6488 lsp::TextEdit {
6489 range: lsp::Range::new(
6490 lsp::Position::new(7, 5),
6491 lsp::Position::new(7, 6),
6492 ),
6493 new_text: "".into(),
6494 },
6495 ],
6496 Some(lsp_document_version),
6497 cx,
6498 )
6499 })
6500 .await
6501 .unwrap();
6502
6503 buffer.update(cx, |buffer, cx| {
6504 for (range, new_text) in edits {
6505 buffer.edit([(range, new_text)], cx);
6506 }
6507 assert_eq!(
6508 buffer.text(),
6509 "
6510 // above first function
6511 fn a() {
6512 // inside first function
6513 f10();
6514 }
6515 fn b() {
6516 // inside second function f200();
6517 }
6518 fn c() {
6519 f4000();
6520 }
6521 "
6522 .unindent()
6523 );
6524 });
6525 }
6526
6527 #[gpui::test]
6528 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6529 cx.foreground().forbid_parking();
6530
6531 let text = "
6532 use a::b;
6533 use a::c;
6534
6535 fn f() {
6536 b();
6537 c();
6538 }
6539 "
6540 .unindent();
6541
6542 let fs = FakeFs::new(cx.background());
6543 fs.insert_tree(
6544 "/dir",
6545 json!({
6546 "a.rs": text.clone(),
6547 }),
6548 )
6549 .await;
6550
6551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6552 let buffer = project
6553 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6554 .await
6555 .unwrap();
6556
6557 // Simulate the language server sending us a small edit in the form of a very large diff.
6558 // Rust-analyzer does this when performing a merge-imports code action.
6559 let edits = project
6560 .update(cx, |project, cx| {
6561 project.edits_from_lsp(
6562 &buffer,
6563 [
6564 // Replace the first use statement without editing the semicolon.
6565 lsp::TextEdit {
6566 range: lsp::Range::new(
6567 lsp::Position::new(0, 4),
6568 lsp::Position::new(0, 8),
6569 ),
6570 new_text: "a::{b, c}".into(),
6571 },
6572 // Reinsert the remainder of the file between the semicolon and the final
6573 // newline of the file.
6574 lsp::TextEdit {
6575 range: lsp::Range::new(
6576 lsp::Position::new(0, 9),
6577 lsp::Position::new(0, 9),
6578 ),
6579 new_text: "\n\n".into(),
6580 },
6581 lsp::TextEdit {
6582 range: lsp::Range::new(
6583 lsp::Position::new(0, 9),
6584 lsp::Position::new(0, 9),
6585 ),
6586 new_text: "
6587 fn f() {
6588 b();
6589 c();
6590 }"
6591 .unindent(),
6592 },
6593 // Delete everything after the first newline of the file.
6594 lsp::TextEdit {
6595 range: lsp::Range::new(
6596 lsp::Position::new(1, 0),
6597 lsp::Position::new(7, 0),
6598 ),
6599 new_text: "".into(),
6600 },
6601 ],
6602 None,
6603 cx,
6604 )
6605 })
6606 .await
6607 .unwrap();
6608
6609 buffer.update(cx, |buffer, cx| {
6610 let edits = edits
6611 .into_iter()
6612 .map(|(range, text)| {
6613 (
6614 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6615 text,
6616 )
6617 })
6618 .collect::<Vec<_>>();
6619
6620 assert_eq!(
6621 edits,
6622 [
6623 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6624 (Point::new(1, 0)..Point::new(2, 0), "".into())
6625 ]
6626 );
6627
6628 for (range, new_text) in edits {
6629 buffer.edit([(range, new_text)], cx);
6630 }
6631 assert_eq!(
6632 buffer.text(),
6633 "
6634 use a::{b, c};
6635
6636 fn f() {
6637 b();
6638 c();
6639 }
6640 "
6641 .unindent()
6642 );
6643 });
6644 }
6645
6646 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6647 buffer: &Buffer,
6648 range: Range<T>,
6649 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6650 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6651 for chunk in buffer.snapshot().chunks(range, true) {
6652 if chunks.last().map_or(false, |prev_chunk| {
6653 prev_chunk.1 == chunk.diagnostic_severity
6654 }) {
6655 chunks.last_mut().unwrap().0.push_str(chunk.text);
6656 } else {
6657 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6658 }
6659 }
6660 chunks
6661 }
6662
6663 #[gpui::test]
6664 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6665 let dir = temp_tree(json!({
6666 "root": {
6667 "dir1": {},
6668 "dir2": {
6669 "dir3": {}
6670 }
6671 }
6672 }));
6673
6674 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6675 let cancel_flag = Default::default();
6676 let results = project
6677 .read_with(cx, |project, cx| {
6678 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6679 })
6680 .await;
6681
6682 assert!(results.is_empty());
6683 }
6684
6685 #[gpui::test(iterations = 10)]
6686 async fn test_definition(cx: &mut gpui::TestAppContext) {
6687 let mut language = Language::new(
6688 LanguageConfig {
6689 name: "Rust".into(),
6690 path_suffixes: vec!["rs".to_string()],
6691 ..Default::default()
6692 },
6693 Some(tree_sitter_rust::language()),
6694 );
6695 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6696
6697 let fs = FakeFs::new(cx.background());
6698 fs.insert_tree(
6699 "/dir",
6700 json!({
6701 "a.rs": "const fn a() { A }",
6702 "b.rs": "const y: i32 = crate::a()",
6703 }),
6704 )
6705 .await;
6706
6707 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6708 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6709
6710 let buffer = project
6711 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6712 .await
6713 .unwrap();
6714
6715 let fake_server = fake_servers.next().await.unwrap();
6716 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6717 let params = params.text_document_position_params;
6718 assert_eq!(
6719 params.text_document.uri.to_file_path().unwrap(),
6720 Path::new("/dir/b.rs"),
6721 );
6722 assert_eq!(params.position, lsp::Position::new(0, 22));
6723
6724 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6725 lsp::Location::new(
6726 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6727 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6728 ),
6729 )))
6730 });
6731
6732 let mut definitions = project
6733 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6734 .await
6735 .unwrap();
6736
6737 assert_eq!(definitions.len(), 1);
6738 let definition = definitions.pop().unwrap();
6739 cx.update(|cx| {
6740 let target_buffer = definition.buffer.read(cx);
6741 assert_eq!(
6742 target_buffer
6743 .file()
6744 .unwrap()
6745 .as_local()
6746 .unwrap()
6747 .abs_path(cx),
6748 Path::new("/dir/a.rs"),
6749 );
6750 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6751 assert_eq!(
6752 list_worktrees(&project, cx),
6753 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6754 );
6755
6756 drop(definition);
6757 });
6758 cx.read(|cx| {
6759 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6760 });
6761
6762 fn list_worktrees<'a>(
6763 project: &'a ModelHandle<Project>,
6764 cx: &'a AppContext,
6765 ) -> Vec<(&'a Path, bool)> {
6766 project
6767 .read(cx)
6768 .worktrees(cx)
6769 .map(|worktree| {
6770 let worktree = worktree.read(cx);
6771 (
6772 worktree.as_local().unwrap().abs_path().as_ref(),
6773 worktree.is_visible(),
6774 )
6775 })
6776 .collect::<Vec<_>>()
6777 }
6778 }
6779
6780 #[gpui::test]
6781 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6782 let mut language = Language::new(
6783 LanguageConfig {
6784 name: "TypeScript".into(),
6785 path_suffixes: vec!["ts".to_string()],
6786 ..Default::default()
6787 },
6788 Some(tree_sitter_typescript::language_typescript()),
6789 );
6790 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6791
6792 let fs = FakeFs::new(cx.background());
6793 fs.insert_tree(
6794 "/dir",
6795 json!({
6796 "a.ts": "",
6797 }),
6798 )
6799 .await;
6800
6801 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6802 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6803 let buffer = project
6804 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6805 .await
6806 .unwrap();
6807
6808 let fake_server = fake_language_servers.next().await.unwrap();
6809
6810 let text = "let a = b.fqn";
6811 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6812 let completions = project.update(cx, |project, cx| {
6813 project.completions(&buffer, text.len(), cx)
6814 });
6815
6816 fake_server
6817 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6818 Ok(Some(lsp::CompletionResponse::Array(vec![
6819 lsp::CompletionItem {
6820 label: "fullyQualifiedName?".into(),
6821 insert_text: Some("fullyQualifiedName".into()),
6822 ..Default::default()
6823 },
6824 ])))
6825 })
6826 .next()
6827 .await;
6828 let completions = completions.await.unwrap();
6829 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6830 assert_eq!(completions.len(), 1);
6831 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6832 assert_eq!(
6833 completions[0].old_range.to_offset(&snapshot),
6834 text.len() - 3..text.len()
6835 );
6836 }
6837
6838 #[gpui::test(iterations = 10)]
6839 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6840 let mut language = Language::new(
6841 LanguageConfig {
6842 name: "TypeScript".into(),
6843 path_suffixes: vec!["ts".to_string()],
6844 ..Default::default()
6845 },
6846 None,
6847 );
6848 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6849
6850 let fs = FakeFs::new(cx.background());
6851 fs.insert_tree(
6852 "/dir",
6853 json!({
6854 "a.ts": "a",
6855 }),
6856 )
6857 .await;
6858
6859 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6860 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6861 let buffer = project
6862 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6863 .await
6864 .unwrap();
6865
6866 let fake_server = fake_language_servers.next().await.unwrap();
6867
6868 // Language server returns code actions that contain commands, and not edits.
6869 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6870 fake_server
6871 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6872 Ok(Some(vec![
6873 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6874 title: "The code action".into(),
6875 command: Some(lsp::Command {
6876 title: "The command".into(),
6877 command: "_the/command".into(),
6878 arguments: Some(vec![json!("the-argument")]),
6879 }),
6880 ..Default::default()
6881 }),
6882 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6883 title: "two".into(),
6884 ..Default::default()
6885 }),
6886 ]))
6887 })
6888 .next()
6889 .await;
6890
6891 let action = actions.await.unwrap()[0].clone();
6892 let apply = project.update(cx, |project, cx| {
6893 project.apply_code_action(buffer.clone(), action, true, cx)
6894 });
6895
6896 // Resolving the code action does not populate its edits. In absence of
6897 // edits, we must execute the given command.
6898 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6899 |action, _| async move { Ok(action) },
6900 );
6901
6902 // While executing the command, the language server sends the editor
6903 // a `workspaceEdit` request.
6904 fake_server
6905 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6906 let fake = fake_server.clone();
6907 move |params, _| {
6908 assert_eq!(params.command, "_the/command");
6909 let fake = fake.clone();
6910 async move {
6911 fake.server
6912 .request::<lsp::request::ApplyWorkspaceEdit>(
6913 lsp::ApplyWorkspaceEditParams {
6914 label: None,
6915 edit: lsp::WorkspaceEdit {
6916 changes: Some(
6917 [(
6918 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6919 vec![lsp::TextEdit {
6920 range: lsp::Range::new(
6921 lsp::Position::new(0, 0),
6922 lsp::Position::new(0, 0),
6923 ),
6924 new_text: "X".into(),
6925 }],
6926 )]
6927 .into_iter()
6928 .collect(),
6929 ),
6930 ..Default::default()
6931 },
6932 },
6933 )
6934 .await
6935 .unwrap();
6936 Ok(Some(json!(null)))
6937 }
6938 }
6939 })
6940 .next()
6941 .await;
6942
6943 // Applying the code action returns a project transaction containing the edits
6944 // sent by the language server in its `workspaceEdit` request.
6945 let transaction = apply.await.unwrap();
6946 assert!(transaction.0.contains_key(&buffer));
6947 buffer.update(cx, |buffer, cx| {
6948 assert_eq!(buffer.text(), "Xa");
6949 buffer.undo(cx);
6950 assert_eq!(buffer.text(), "a");
6951 });
6952 }
6953
6954 #[gpui::test]
6955 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6956 let fs = FakeFs::new(cx.background());
6957 fs.insert_tree(
6958 "/dir",
6959 json!({
6960 "file1": "the old contents",
6961 }),
6962 )
6963 .await;
6964
6965 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6966 let buffer = project
6967 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6968 .await
6969 .unwrap();
6970 buffer
6971 .update(cx, |buffer, cx| {
6972 assert_eq!(buffer.text(), "the old contents");
6973 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6974 buffer.save(cx)
6975 })
6976 .await
6977 .unwrap();
6978
6979 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6980 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6981 }
6982
6983 #[gpui::test]
6984 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6985 let fs = FakeFs::new(cx.background());
6986 fs.insert_tree(
6987 "/dir",
6988 json!({
6989 "file1": "the old contents",
6990 }),
6991 )
6992 .await;
6993
6994 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
6995 let buffer = project
6996 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6997 .await
6998 .unwrap();
6999 buffer
7000 .update(cx, |buffer, cx| {
7001 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7002 buffer.save(cx)
7003 })
7004 .await
7005 .unwrap();
7006
7007 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7008 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7009 }
7010
7011 #[gpui::test]
7012 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7013 let fs = FakeFs::new(cx.background());
7014 fs.insert_tree("/dir", json!({})).await;
7015
7016 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7017 let buffer = project.update(cx, |project, cx| {
7018 project.create_buffer("", None, cx).unwrap()
7019 });
7020 buffer.update(cx, |buffer, cx| {
7021 buffer.edit([(0..0, "abc")], cx);
7022 assert!(buffer.is_dirty());
7023 assert!(!buffer.has_conflict());
7024 });
7025 project
7026 .update(cx, |project, cx| {
7027 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7028 })
7029 .await
7030 .unwrap();
7031 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7032 buffer.read_with(cx, |buffer, cx| {
7033 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7034 assert!(!buffer.is_dirty());
7035 assert!(!buffer.has_conflict());
7036 });
7037
7038 let opened_buffer = project
7039 .update(cx, |project, cx| {
7040 project.open_local_buffer("/dir/file1", cx)
7041 })
7042 .await
7043 .unwrap();
7044 assert_eq!(opened_buffer, buffer);
7045 }
7046
7047 #[gpui::test(retries = 5)]
7048 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7049 let dir = temp_tree(json!({
7050 "a": {
7051 "file1": "",
7052 "file2": "",
7053 "file3": "",
7054 },
7055 "b": {
7056 "c": {
7057 "file4": "",
7058 "file5": "",
7059 }
7060 }
7061 }));
7062
7063 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7064 let rpc = project.read_with(cx, |p, _| p.client.clone());
7065
7066 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7067 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7068 async move { buffer.await.unwrap() }
7069 };
7070 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7071 project.read_with(cx, |project, cx| {
7072 let tree = project.worktrees(cx).next().unwrap();
7073 tree.read(cx)
7074 .entry_for_path(path)
7075 .expect(&format!("no entry for path {}", path))
7076 .id
7077 })
7078 };
7079
7080 let buffer2 = buffer_for_path("a/file2", cx).await;
7081 let buffer3 = buffer_for_path("a/file3", cx).await;
7082 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7083 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7084
7085 let file2_id = id_for_path("a/file2", &cx);
7086 let file3_id = id_for_path("a/file3", &cx);
7087 let file4_id = id_for_path("b/c/file4", &cx);
7088
7089 // Create a remote copy of this worktree.
7090 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7091 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7092 let (remote, load_task) = cx.update(|cx| {
7093 Worktree::remote(
7094 1,
7095 1,
7096 initial_snapshot.to_proto(&Default::default(), true),
7097 rpc.clone(),
7098 cx,
7099 )
7100 });
7101 // tree
7102 load_task.await;
7103
7104 cx.read(|cx| {
7105 assert!(!buffer2.read(cx).is_dirty());
7106 assert!(!buffer3.read(cx).is_dirty());
7107 assert!(!buffer4.read(cx).is_dirty());
7108 assert!(!buffer5.read(cx).is_dirty());
7109 });
7110
7111 // Rename and delete files and directories.
7112 tree.flush_fs_events(&cx).await;
7113 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7114 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7115 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7116 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7117 tree.flush_fs_events(&cx).await;
7118
7119 let expected_paths = vec![
7120 "a",
7121 "a/file1",
7122 "a/file2.new",
7123 "b",
7124 "d",
7125 "d/file3",
7126 "d/file4",
7127 ];
7128
7129 cx.read(|app| {
7130 assert_eq!(
7131 tree.read(app)
7132 .paths()
7133 .map(|p| p.to_str().unwrap())
7134 .collect::<Vec<_>>(),
7135 expected_paths
7136 );
7137
7138 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7139 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7140 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7141
7142 assert_eq!(
7143 buffer2.read(app).file().unwrap().path().as_ref(),
7144 Path::new("a/file2.new")
7145 );
7146 assert_eq!(
7147 buffer3.read(app).file().unwrap().path().as_ref(),
7148 Path::new("d/file3")
7149 );
7150 assert_eq!(
7151 buffer4.read(app).file().unwrap().path().as_ref(),
7152 Path::new("d/file4")
7153 );
7154 assert_eq!(
7155 buffer5.read(app).file().unwrap().path().as_ref(),
7156 Path::new("b/c/file5")
7157 );
7158
7159 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7160 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7161 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7162 assert!(buffer5.read(app).file().unwrap().is_deleted());
7163 });
7164
7165 // Update the remote worktree. Check that it becomes consistent with the
7166 // local worktree.
7167 remote.update(cx, |remote, cx| {
7168 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7169 &initial_snapshot,
7170 1,
7171 1,
7172 true,
7173 );
7174 remote
7175 .as_remote_mut()
7176 .unwrap()
7177 .snapshot
7178 .apply_remote_update(update_message)
7179 .unwrap();
7180
7181 assert_eq!(
7182 remote
7183 .paths()
7184 .map(|p| p.to_str().unwrap())
7185 .collect::<Vec<_>>(),
7186 expected_paths
7187 );
7188 });
7189 }
7190
7191 #[gpui::test]
7192 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7193 let fs = FakeFs::new(cx.background());
7194 fs.insert_tree(
7195 "/dir",
7196 json!({
7197 "a.txt": "a-contents",
7198 "b.txt": "b-contents",
7199 }),
7200 )
7201 .await;
7202
7203 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7204
7205 // Spawn multiple tasks to open paths, repeating some paths.
7206 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7207 (
7208 p.open_local_buffer("/dir/a.txt", cx),
7209 p.open_local_buffer("/dir/b.txt", cx),
7210 p.open_local_buffer("/dir/a.txt", cx),
7211 )
7212 });
7213
7214 let buffer_a_1 = buffer_a_1.await.unwrap();
7215 let buffer_a_2 = buffer_a_2.await.unwrap();
7216 let buffer_b = buffer_b.await.unwrap();
7217 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7218 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7219
7220 // There is only one buffer per path.
7221 let buffer_a_id = buffer_a_1.id();
7222 assert_eq!(buffer_a_2.id(), buffer_a_id);
7223
7224 // Open the same path again while it is still open.
7225 drop(buffer_a_1);
7226 let buffer_a_3 = project
7227 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7228 .await
7229 .unwrap();
7230
7231 // There's still only one buffer per path.
7232 assert_eq!(buffer_a_3.id(), buffer_a_id);
7233 }
7234
7235 #[gpui::test]
7236 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7237 let fs = FakeFs::new(cx.background());
7238 fs.insert_tree(
7239 "/dir",
7240 json!({
7241 "file1": "abc",
7242 "file2": "def",
7243 "file3": "ghi",
7244 }),
7245 )
7246 .await;
7247
7248 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7249
7250 let buffer1 = project
7251 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7252 .await
7253 .unwrap();
7254 let events = Rc::new(RefCell::new(Vec::new()));
7255
7256 // initially, the buffer isn't dirty.
7257 buffer1.update(cx, |buffer, cx| {
7258 cx.subscribe(&buffer1, {
7259 let events = events.clone();
7260 move |_, _, event, _| match event {
7261 BufferEvent::Operation(_) => {}
7262 _ => events.borrow_mut().push(event.clone()),
7263 }
7264 })
7265 .detach();
7266
7267 assert!(!buffer.is_dirty());
7268 assert!(events.borrow().is_empty());
7269
7270 buffer.edit([(1..2, "")], cx);
7271 });
7272
7273 // after the first edit, the buffer is dirty, and emits a dirtied event.
7274 buffer1.update(cx, |buffer, cx| {
7275 assert!(buffer.text() == "ac");
7276 assert!(buffer.is_dirty());
7277 assert_eq!(
7278 *events.borrow(),
7279 &[language::Event::Edited, language::Event::Dirtied]
7280 );
7281 events.borrow_mut().clear();
7282 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7283 });
7284
7285 // after saving, the buffer is not dirty, and emits a saved event.
7286 buffer1.update(cx, |buffer, cx| {
7287 assert!(!buffer.is_dirty());
7288 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7289 events.borrow_mut().clear();
7290
7291 buffer.edit([(1..1, "B")], cx);
7292 buffer.edit([(2..2, "D")], cx);
7293 });
7294
7295 // after editing again, the buffer is dirty, and emits another dirty event.
7296 buffer1.update(cx, |buffer, cx| {
7297 assert!(buffer.text() == "aBDc");
7298 assert!(buffer.is_dirty());
7299 assert_eq!(
7300 *events.borrow(),
7301 &[
7302 language::Event::Edited,
7303 language::Event::Dirtied,
7304 language::Event::Edited,
7305 ],
7306 );
7307 events.borrow_mut().clear();
7308
7309 // TODO - currently, after restoring the buffer to its
7310 // previously-saved state, the is still considered dirty.
7311 buffer.edit([(1..3, "")], cx);
7312 assert!(buffer.text() == "ac");
7313 assert!(buffer.is_dirty());
7314 });
7315
7316 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7317
7318 // When a file is deleted, the buffer is considered dirty.
7319 let events = Rc::new(RefCell::new(Vec::new()));
7320 let buffer2 = project
7321 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7322 .await
7323 .unwrap();
7324 buffer2.update(cx, |_, cx| {
7325 cx.subscribe(&buffer2, {
7326 let events = events.clone();
7327 move |_, _, event, _| events.borrow_mut().push(event.clone())
7328 })
7329 .detach();
7330 });
7331
7332 fs.remove_file("/dir/file2".as_ref(), Default::default())
7333 .await
7334 .unwrap();
7335 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7336 assert_eq!(
7337 *events.borrow(),
7338 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7339 );
7340
7341 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7342 let events = Rc::new(RefCell::new(Vec::new()));
7343 let buffer3 = project
7344 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7345 .await
7346 .unwrap();
7347 buffer3.update(cx, |_, cx| {
7348 cx.subscribe(&buffer3, {
7349 let events = events.clone();
7350 move |_, _, event, _| events.borrow_mut().push(event.clone())
7351 })
7352 .detach();
7353 });
7354
7355 buffer3.update(cx, |buffer, cx| {
7356 buffer.edit([(0..0, "x")], cx);
7357 });
7358 events.borrow_mut().clear();
7359 fs.remove_file("/dir/file3".as_ref(), Default::default())
7360 .await
7361 .unwrap();
7362 buffer3
7363 .condition(&cx, |_, _| !events.borrow().is_empty())
7364 .await;
7365 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7366 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7367 }
7368
7369 #[gpui::test]
7370 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7371 let initial_contents = "aaa\nbbbbb\nc\n";
7372 let fs = FakeFs::new(cx.background());
7373 fs.insert_tree(
7374 "/dir",
7375 json!({
7376 "the-file": initial_contents,
7377 }),
7378 )
7379 .await;
7380 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7381 let buffer = project
7382 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7383 .await
7384 .unwrap();
7385
7386 let anchors = (0..3)
7387 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7388 .collect::<Vec<_>>();
7389
7390 // Change the file on disk, adding two new lines of text, and removing
7391 // one line.
7392 buffer.read_with(cx, |buffer, _| {
7393 assert!(!buffer.is_dirty());
7394 assert!(!buffer.has_conflict());
7395 });
7396 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7397 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7398 .await
7399 .unwrap();
7400
7401 // Because the buffer was not modified, it is reloaded from disk. Its
7402 // contents are edited according to the diff between the old and new
7403 // file contents.
7404 buffer
7405 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7406 .await;
7407
7408 buffer.update(cx, |buffer, _| {
7409 assert_eq!(buffer.text(), new_contents);
7410 assert!(!buffer.is_dirty());
7411 assert!(!buffer.has_conflict());
7412
7413 let anchor_positions = anchors
7414 .iter()
7415 .map(|anchor| anchor.to_point(&*buffer))
7416 .collect::<Vec<_>>();
7417 assert_eq!(
7418 anchor_positions,
7419 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7420 );
7421 });
7422
7423 // Modify the buffer
7424 buffer.update(cx, |buffer, cx| {
7425 buffer.edit([(0..0, " ")], cx);
7426 assert!(buffer.is_dirty());
7427 assert!(!buffer.has_conflict());
7428 });
7429
7430 // Change the file on disk again, adding blank lines to the beginning.
7431 fs.save(
7432 "/dir/the-file".as_ref(),
7433 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7434 )
7435 .await
7436 .unwrap();
7437
7438 // Because the buffer is modified, it doesn't reload from disk, but is
7439 // marked as having a conflict.
7440 buffer
7441 .condition(&cx, |buffer, _| buffer.has_conflict())
7442 .await;
7443 }
7444
7445 #[gpui::test]
7446 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7447 cx.foreground().forbid_parking();
7448
7449 let fs = FakeFs::new(cx.background());
7450 fs.insert_tree(
7451 "/the-dir",
7452 json!({
7453 "a.rs": "
7454 fn foo(mut v: Vec<usize>) {
7455 for x in &v {
7456 v.push(1);
7457 }
7458 }
7459 "
7460 .unindent(),
7461 }),
7462 )
7463 .await;
7464
7465 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7466 let buffer = project
7467 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7468 .await
7469 .unwrap();
7470
7471 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7472 let message = lsp::PublishDiagnosticsParams {
7473 uri: buffer_uri.clone(),
7474 diagnostics: vec![
7475 lsp::Diagnostic {
7476 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7477 severity: Some(DiagnosticSeverity::WARNING),
7478 message: "error 1".to_string(),
7479 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7480 location: lsp::Location {
7481 uri: buffer_uri.clone(),
7482 range: lsp::Range::new(
7483 lsp::Position::new(1, 8),
7484 lsp::Position::new(1, 9),
7485 ),
7486 },
7487 message: "error 1 hint 1".to_string(),
7488 }]),
7489 ..Default::default()
7490 },
7491 lsp::Diagnostic {
7492 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7493 severity: Some(DiagnosticSeverity::HINT),
7494 message: "error 1 hint 1".to_string(),
7495 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7496 location: lsp::Location {
7497 uri: buffer_uri.clone(),
7498 range: lsp::Range::new(
7499 lsp::Position::new(1, 8),
7500 lsp::Position::new(1, 9),
7501 ),
7502 },
7503 message: "original diagnostic".to_string(),
7504 }]),
7505 ..Default::default()
7506 },
7507 lsp::Diagnostic {
7508 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7509 severity: Some(DiagnosticSeverity::ERROR),
7510 message: "error 2".to_string(),
7511 related_information: Some(vec![
7512 lsp::DiagnosticRelatedInformation {
7513 location: lsp::Location {
7514 uri: buffer_uri.clone(),
7515 range: lsp::Range::new(
7516 lsp::Position::new(1, 13),
7517 lsp::Position::new(1, 15),
7518 ),
7519 },
7520 message: "error 2 hint 1".to_string(),
7521 },
7522 lsp::DiagnosticRelatedInformation {
7523 location: lsp::Location {
7524 uri: buffer_uri.clone(),
7525 range: lsp::Range::new(
7526 lsp::Position::new(1, 13),
7527 lsp::Position::new(1, 15),
7528 ),
7529 },
7530 message: "error 2 hint 2".to_string(),
7531 },
7532 ]),
7533 ..Default::default()
7534 },
7535 lsp::Diagnostic {
7536 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7537 severity: Some(DiagnosticSeverity::HINT),
7538 message: "error 2 hint 1".to_string(),
7539 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7540 location: lsp::Location {
7541 uri: buffer_uri.clone(),
7542 range: lsp::Range::new(
7543 lsp::Position::new(2, 8),
7544 lsp::Position::new(2, 17),
7545 ),
7546 },
7547 message: "original diagnostic".to_string(),
7548 }]),
7549 ..Default::default()
7550 },
7551 lsp::Diagnostic {
7552 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7553 severity: Some(DiagnosticSeverity::HINT),
7554 message: "error 2 hint 2".to_string(),
7555 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7556 location: lsp::Location {
7557 uri: buffer_uri.clone(),
7558 range: lsp::Range::new(
7559 lsp::Position::new(2, 8),
7560 lsp::Position::new(2, 17),
7561 ),
7562 },
7563 message: "original diagnostic".to_string(),
7564 }]),
7565 ..Default::default()
7566 },
7567 ],
7568 version: None,
7569 };
7570
7571 project
7572 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7573 .unwrap();
7574 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7575
7576 assert_eq!(
7577 buffer
7578 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7579 .collect::<Vec<_>>(),
7580 &[
7581 DiagnosticEntry {
7582 range: Point::new(1, 8)..Point::new(1, 9),
7583 diagnostic: Diagnostic {
7584 severity: DiagnosticSeverity::WARNING,
7585 message: "error 1".to_string(),
7586 group_id: 0,
7587 is_primary: true,
7588 ..Default::default()
7589 }
7590 },
7591 DiagnosticEntry {
7592 range: Point::new(1, 8)..Point::new(1, 9),
7593 diagnostic: Diagnostic {
7594 severity: DiagnosticSeverity::HINT,
7595 message: "error 1 hint 1".to_string(),
7596 group_id: 0,
7597 is_primary: false,
7598 ..Default::default()
7599 }
7600 },
7601 DiagnosticEntry {
7602 range: Point::new(1, 13)..Point::new(1, 15),
7603 diagnostic: Diagnostic {
7604 severity: DiagnosticSeverity::HINT,
7605 message: "error 2 hint 1".to_string(),
7606 group_id: 1,
7607 is_primary: false,
7608 ..Default::default()
7609 }
7610 },
7611 DiagnosticEntry {
7612 range: Point::new(1, 13)..Point::new(1, 15),
7613 diagnostic: Diagnostic {
7614 severity: DiagnosticSeverity::HINT,
7615 message: "error 2 hint 2".to_string(),
7616 group_id: 1,
7617 is_primary: false,
7618 ..Default::default()
7619 }
7620 },
7621 DiagnosticEntry {
7622 range: Point::new(2, 8)..Point::new(2, 17),
7623 diagnostic: Diagnostic {
7624 severity: DiagnosticSeverity::ERROR,
7625 message: "error 2".to_string(),
7626 group_id: 1,
7627 is_primary: true,
7628 ..Default::default()
7629 }
7630 }
7631 ]
7632 );
7633
7634 assert_eq!(
7635 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7636 &[
7637 DiagnosticEntry {
7638 range: Point::new(1, 8)..Point::new(1, 9),
7639 diagnostic: Diagnostic {
7640 severity: DiagnosticSeverity::WARNING,
7641 message: "error 1".to_string(),
7642 group_id: 0,
7643 is_primary: true,
7644 ..Default::default()
7645 }
7646 },
7647 DiagnosticEntry {
7648 range: Point::new(1, 8)..Point::new(1, 9),
7649 diagnostic: Diagnostic {
7650 severity: DiagnosticSeverity::HINT,
7651 message: "error 1 hint 1".to_string(),
7652 group_id: 0,
7653 is_primary: false,
7654 ..Default::default()
7655 }
7656 },
7657 ]
7658 );
7659 assert_eq!(
7660 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7661 &[
7662 DiagnosticEntry {
7663 range: Point::new(1, 13)..Point::new(1, 15),
7664 diagnostic: Diagnostic {
7665 severity: DiagnosticSeverity::HINT,
7666 message: "error 2 hint 1".to_string(),
7667 group_id: 1,
7668 is_primary: false,
7669 ..Default::default()
7670 }
7671 },
7672 DiagnosticEntry {
7673 range: Point::new(1, 13)..Point::new(1, 15),
7674 diagnostic: Diagnostic {
7675 severity: DiagnosticSeverity::HINT,
7676 message: "error 2 hint 2".to_string(),
7677 group_id: 1,
7678 is_primary: false,
7679 ..Default::default()
7680 }
7681 },
7682 DiagnosticEntry {
7683 range: Point::new(2, 8)..Point::new(2, 17),
7684 diagnostic: Diagnostic {
7685 severity: DiagnosticSeverity::ERROR,
7686 message: "error 2".to_string(),
7687 group_id: 1,
7688 is_primary: true,
7689 ..Default::default()
7690 }
7691 }
7692 ]
7693 );
7694 }
7695
7696 #[gpui::test]
7697 async fn test_rename(cx: &mut gpui::TestAppContext) {
7698 cx.foreground().forbid_parking();
7699
7700 let mut language = Language::new(
7701 LanguageConfig {
7702 name: "Rust".into(),
7703 path_suffixes: vec!["rs".to_string()],
7704 ..Default::default()
7705 },
7706 Some(tree_sitter_rust::language()),
7707 );
7708 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7709 capabilities: lsp::ServerCapabilities {
7710 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7711 prepare_provider: Some(true),
7712 work_done_progress_options: Default::default(),
7713 })),
7714 ..Default::default()
7715 },
7716 ..Default::default()
7717 });
7718
7719 let fs = FakeFs::new(cx.background());
7720 fs.insert_tree(
7721 "/dir",
7722 json!({
7723 "one.rs": "const ONE: usize = 1;",
7724 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7725 }),
7726 )
7727 .await;
7728
7729 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7730 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7731 let buffer = project
7732 .update(cx, |project, cx| {
7733 project.open_local_buffer("/dir/one.rs", cx)
7734 })
7735 .await
7736 .unwrap();
7737
7738 let fake_server = fake_servers.next().await.unwrap();
7739
7740 let response = project.update(cx, |project, cx| {
7741 project.prepare_rename(buffer.clone(), 7, cx)
7742 });
7743 fake_server
7744 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7745 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7746 assert_eq!(params.position, lsp::Position::new(0, 7));
7747 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7748 lsp::Position::new(0, 6),
7749 lsp::Position::new(0, 9),
7750 ))))
7751 })
7752 .next()
7753 .await
7754 .unwrap();
7755 let range = response.await.unwrap().unwrap();
7756 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7757 assert_eq!(range, 6..9);
7758
7759 let response = project.update(cx, |project, cx| {
7760 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7761 });
7762 fake_server
7763 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7764 assert_eq!(
7765 params.text_document_position.text_document.uri.as_str(),
7766 "file:///dir/one.rs"
7767 );
7768 assert_eq!(
7769 params.text_document_position.position,
7770 lsp::Position::new(0, 7)
7771 );
7772 assert_eq!(params.new_name, "THREE");
7773 Ok(Some(lsp::WorkspaceEdit {
7774 changes: Some(
7775 [
7776 (
7777 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7778 vec![lsp::TextEdit::new(
7779 lsp::Range::new(
7780 lsp::Position::new(0, 6),
7781 lsp::Position::new(0, 9),
7782 ),
7783 "THREE".to_string(),
7784 )],
7785 ),
7786 (
7787 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7788 vec![
7789 lsp::TextEdit::new(
7790 lsp::Range::new(
7791 lsp::Position::new(0, 24),
7792 lsp::Position::new(0, 27),
7793 ),
7794 "THREE".to_string(),
7795 ),
7796 lsp::TextEdit::new(
7797 lsp::Range::new(
7798 lsp::Position::new(0, 35),
7799 lsp::Position::new(0, 38),
7800 ),
7801 "THREE".to_string(),
7802 ),
7803 ],
7804 ),
7805 ]
7806 .into_iter()
7807 .collect(),
7808 ),
7809 ..Default::default()
7810 }))
7811 })
7812 .next()
7813 .await
7814 .unwrap();
7815 let mut transaction = response.await.unwrap().0;
7816 assert_eq!(transaction.len(), 2);
7817 assert_eq!(
7818 transaction
7819 .remove_entry(&buffer)
7820 .unwrap()
7821 .0
7822 .read_with(cx, |buffer, _| buffer.text()),
7823 "const THREE: usize = 1;"
7824 );
7825 assert_eq!(
7826 transaction
7827 .into_keys()
7828 .next()
7829 .unwrap()
7830 .read_with(cx, |buffer, _| buffer.text()),
7831 "const TWO: usize = one::THREE + one::THREE;"
7832 );
7833 }
7834
7835 #[gpui::test]
7836 async fn test_search(cx: &mut gpui::TestAppContext) {
7837 let fs = FakeFs::new(cx.background());
7838 fs.insert_tree(
7839 "/dir",
7840 json!({
7841 "one.rs": "const ONE: usize = 1;",
7842 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7843 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7844 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7845 }),
7846 )
7847 .await;
7848 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7849 assert_eq!(
7850 search(&project, SearchQuery::text("TWO", false, true), cx)
7851 .await
7852 .unwrap(),
7853 HashMap::from_iter([
7854 ("two.rs".to_string(), vec![6..9]),
7855 ("three.rs".to_string(), vec![37..40])
7856 ])
7857 );
7858
7859 let buffer_4 = project
7860 .update(cx, |project, cx| {
7861 project.open_local_buffer("/dir/four.rs", cx)
7862 })
7863 .await
7864 .unwrap();
7865 buffer_4.update(cx, |buffer, cx| {
7866 let text = "two::TWO";
7867 buffer.edit([(20..28, text), (31..43, text)], cx);
7868 });
7869
7870 assert_eq!(
7871 search(&project, SearchQuery::text("TWO", false, true), cx)
7872 .await
7873 .unwrap(),
7874 HashMap::from_iter([
7875 ("two.rs".to_string(), vec![6..9]),
7876 ("three.rs".to_string(), vec![37..40]),
7877 ("four.rs".to_string(), vec![25..28, 36..39])
7878 ])
7879 );
7880
7881 async fn search(
7882 project: &ModelHandle<Project>,
7883 query: SearchQuery,
7884 cx: &mut gpui::TestAppContext,
7885 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7886 let results = project
7887 .update(cx, |project, cx| project.search(query, cx))
7888 .await?;
7889
7890 Ok(results
7891 .into_iter()
7892 .map(|(buffer, ranges)| {
7893 buffer.read_with(cx, |buffer, _| {
7894 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7895 let ranges = ranges
7896 .into_iter()
7897 .map(|range| range.to_offset(buffer))
7898 .collect::<Vec<_>>();
7899 (path, ranges)
7900 })
7901 })
7902 .collect())
7903 }
7904 }
7905}