1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 ffi::OsString,
40 hash::Hash,
41 mem,
42 ops::Range,
43 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
44 path::{Component, Path, PathBuf},
45 rc::Rc,
46 sync::{
47 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
48 Arc,
49 },
50 time::Instant,
51};
52use thiserror::Error;
53use util::{post_inc, ResultExt, TryFutureExt as _};
54
55pub use fs::*;
56pub use worktree::*;
57
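/// A model that may be associated with a project entry, such as a buffer opened
/// from a file in one of the project's worktrees.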
58pub trait Item: Entity {
59 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
60}
61
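/// The top-level model for a collection of worktrees being edited together. It owns
/// the open buffers, the language servers started for them, and the collaboration
/// state shared with other peers. A project is either local (hosted from this
/// machine's filesystem) or remote (a replica joined over RPC), as recorded in
/// `ProjectClientState`.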
62pub struct Project {
63 worktrees: Vec<WorktreeHandle>,
64 active_entry: Option<ProjectEntryId>,
65 languages: Arc<LanguageRegistry>,
66 language_servers:
67 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
68 started_language_servers:
69 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
70 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
71 language_server_settings: Arc<Mutex<serde_json::Value>>,
72 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
73 next_language_server_id: usize,
74 client: Arc<client::Client>,
75 next_entry_id: Arc<AtomicUsize>,
76 user_store: ModelHandle<UserStore>,
77 fs: Arc<dyn Fs>,
78 client_state: ProjectClientState,
79 collaborators: HashMap<PeerId, Collaborator>,
80 subscriptions: Vec<client::Subscription>,
81 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
82 shared_buffers: HashMap<PeerId, HashSet<u64>>,
83 loading_buffers: HashMap<
84 ProjectPath,
85 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
86 >,
87 loading_local_worktrees:
88 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
89 opened_buffers: HashMap<u64, OpenBuffer>,
90 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
91 nonce: u128,
92}
93
94#[derive(Error, Debug)]
95pub enum JoinProjectError {
96 #[error("host declined join request")]
97 HostDeclined,
98 #[error("host closed the project")]
99 HostClosedProject,
100 #[error("host went offline")]
101 HostWentOffline,
102 #[error("{0}")]
103 Other(#[from] anyhow::Error),
104}
105
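/// The project's handle on an open buffer. Buffers are held strongly while the
/// project is shared or remote so they remain alive for collaborators, and weakly
/// otherwise; `Loading` buffers operations that arrive before the buffer itself has
/// finished opening, to be applied once it is registered.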
106enum OpenBuffer {
107 Strong(ModelHandle<Buffer>),
108 Weak(WeakModelHandle<Buffer>),
109 Loading(Vec<Operation>),
110}
111
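/// Like `OpenBuffer`, worktree handles are upgraded to strong references while the
/// project is shared and downgraded to weak ones when sharing stops, unless the
/// worktree is visible.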
112enum WorktreeHandle {
113 Strong(ModelHandle<Worktree>),
114 Weak(WeakModelHandle<Worktree>),
115}
116
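/// Distinguishes the host's local project, which registers itself with the server
/// and can be shared, from a guest's remote replica, which tracks whether the host
/// has stopped sharing.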
117enum ProjectClientState {
118 Local {
119 is_shared: bool,
120 remote_id_tx: watch::Sender<Option<u64>>,
121 remote_id_rx: watch::Receiver<Option<u64>>,
122 _maintain_remote_id_task: Task<Option<()>>,
123 },
124 Remote {
125 sharing_has_stopped: bool,
126 remote_id: u64,
127 replica_id: ReplicaId,
128 _detect_unshare_task: Task<Option<()>>,
129 },
130}
131
132#[derive(Clone, Debug)]
133pub struct Collaborator {
134 pub user: Arc<User>,
135 pub peer_id: PeerId,
136 pub replica_id: ReplicaId,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub enum Event {
141 ActiveEntryChanged(Option<ProjectEntryId>),
142 WorktreeRemoved(WorktreeId),
143 DiskBasedDiagnosticsStarted,
144 DiskBasedDiagnosticsUpdated,
145 DiskBasedDiagnosticsFinished,
146 DiagnosticsUpdated(ProjectPath),
147 RemoteIdChanged(Option<u64>),
148 CollaboratorLeft(PeerId),
149 ContactRequestedJoin(Arc<User>),
150 ContactCancelledJoinRequest(Arc<User>),
151}
152
153#[derive(Serialize)]
154pub struct LanguageServerStatus {
155 pub name: String,
156 pub pending_work: BTreeMap<String, LanguageServerProgress>,
157 pub pending_diagnostic_updates: isize,
158}
159
160#[derive(Clone, Debug, Serialize)]
161pub struct LanguageServerProgress {
162 pub message: Option<String>,
163 pub percentage: Option<usize>,
164 #[serde(skip_serializing)]
165 pub last_update_at: Instant,
166}
167
168#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
169pub struct ProjectPath {
170 pub worktree_id: WorktreeId,
171 pub path: Arc<Path>,
172}
173
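/// Counts of primary error and warning diagnostics, convertible to the wire format
/// with `to_proto`.
///
/// A minimal usage sketch (illustrative only):
///
/// ```ignore
/// let summary = DiagnosticSummary { error_count: 2, warning_count: 1 };
/// assert!(!summary.is_empty());
/// let message = summary.to_proto(std::path::Path::new("src/main.rs"));
/// assert_eq!(message.error_count, 2);
/// ```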
174#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
175pub struct DiagnosticSummary {
176 pub error_count: usize,
177 pub warning_count: usize,
178}
179
180#[derive(Debug)]
181pub struct Location {
182 pub buffer: ModelHandle<Buffer>,
183 pub range: Range<language::Anchor>,
184}
185
186#[derive(Debug)]
187pub struct DocumentHighlight {
188 pub range: Range<language::Anchor>,
189 pub kind: DocumentHighlightKind,
190}
191
192#[derive(Clone, Debug)]
193pub struct Symbol {
194 pub source_worktree_id: WorktreeId,
195 pub worktree_id: WorktreeId,
196 pub language_server_name: LanguageServerName,
197 pub path: PathBuf,
198 pub label: CodeLabel,
199 pub name: String,
200 pub kind: lsp::SymbolKind,
201 pub range: Range<PointUtf16>,
202 pub signature: [u8; 32],
203}
204
205#[derive(Default)]
206pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
207
208impl DiagnosticSummary {
209 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
210 let mut this = Self {
211 error_count: 0,
212 warning_count: 0,
213 };
214
215 for entry in diagnostics {
216 if entry.diagnostic.is_primary {
217 match entry.diagnostic.severity {
218 DiagnosticSeverity::ERROR => this.error_count += 1,
219 DiagnosticSeverity::WARNING => this.warning_count += 1,
220 _ => {}
221 }
222 }
223 }
224
225 this
226 }
227
228 pub fn is_empty(&self) -> bool {
229 self.error_count == 0 && self.warning_count == 0
230 }
231
232 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
233 proto::DiagnosticSummary {
234 path: path.to_string_lossy().to_string(),
235 error_count: self.error_count as u32,
236 warning_count: self.warning_count as u32,
237 }
238 }
239}
240
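/// A stable identifier for a file or directory within a project, allocated from a
/// shared atomic counter and convertible to and from the wire format.
///
/// A minimal sketch of how ids are allocated (illustrative only):
///
/// ```ignore
/// use std::sync::atomic::AtomicUsize;
///
/// let counter = AtomicUsize::new(0);
/// let first = ProjectEntryId::new(&counter);
/// let second = ProjectEntryId::new(&counter);
/// assert_ne!(first, second);
/// assert_eq!(second.to_proto(), 1);
/// ```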
241#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
242pub struct ProjectEntryId(usize);
243
244impl ProjectEntryId {
245 pub const MAX: Self = Self(usize::MAX);
246
247 pub fn new(counter: &AtomicUsize) -> Self {
248 Self(counter.fetch_add(1, SeqCst))
249 }
250
251 pub fn from_proto(id: u64) -> Self {
252 Self(id as usize)
253 }
254
255 pub fn to_proto(&self) -> u64 {
256 self.0 as u64
257 }
258
259 pub fn to_usize(&self) -> usize {
260 self.0
261 }
262}
263
264impl Project {
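    /// Registers the client-side RPC handlers that back collaboration: buffer and
    /// worktree synchronization, project entry operations, and forwarding of
    /// LSP-backed requests such as definitions, references, and renames.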
265 pub fn init(client: &Arc<Client>) {
266 client.add_model_message_handler(Self::handle_request_join_project);
267 client.add_model_message_handler(Self::handle_add_collaborator);
268 client.add_model_message_handler(Self::handle_buffer_reloaded);
269 client.add_model_message_handler(Self::handle_buffer_saved);
270 client.add_model_message_handler(Self::handle_start_language_server);
271 client.add_model_message_handler(Self::handle_update_language_server);
272 client.add_model_message_handler(Self::handle_remove_collaborator);
273 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
274 client.add_model_message_handler(Self::handle_register_worktree);
275 client.add_model_message_handler(Self::handle_unregister_worktree);
276 client.add_model_message_handler(Self::handle_unregister_project);
277 client.add_model_message_handler(Self::handle_project_unshared);
278 client.add_model_message_handler(Self::handle_update_buffer_file);
279 client.add_model_message_handler(Self::handle_update_buffer);
280 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
281 client.add_model_message_handler(Self::handle_update_worktree);
282 client.add_model_request_handler(Self::handle_create_project_entry);
283 client.add_model_request_handler(Self::handle_rename_project_entry);
284 client.add_model_request_handler(Self::handle_delete_project_entry);
285 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
286 client.add_model_request_handler(Self::handle_apply_code_action);
287 client.add_model_request_handler(Self::handle_reload_buffers);
288 client.add_model_request_handler(Self::handle_format_buffers);
289 client.add_model_request_handler(Self::handle_get_code_actions);
290 client.add_model_request_handler(Self::handle_get_completions);
291 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
292 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
293 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
294 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
295 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
296 client.add_model_request_handler(Self::handle_search_project);
297 client.add_model_request_handler(Self::handle_get_project_symbols);
298 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
299 client.add_model_request_handler(Self::handle_open_buffer_by_id);
300 client.add_model_request_handler(Self::handle_open_buffer_by_path);
301 client.add_model_request_handler(Self::handle_save_buffer);
302 }
303
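    /// Creates a local project. A background task watches the client's connection
    /// status, registering the project with the server whenever the client connects
    /// and unregistering it when the connection is lost.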
304 pub fn local(
305 client: Arc<Client>,
306 user_store: ModelHandle<UserStore>,
307 languages: Arc<LanguageRegistry>,
308 fs: Arc<dyn Fs>,
309 cx: &mut MutableAppContext,
310 ) -> ModelHandle<Self> {
311 cx.add_model(|cx: &mut ModelContext<Self>| {
312 let (remote_id_tx, remote_id_rx) = watch::channel();
313 let _maintain_remote_id_task = cx.spawn_weak({
314 let rpc = client.clone();
315 move |this, mut cx| {
316 async move {
317 let mut status = rpc.status();
318 while let Some(status) = status.next().await {
319 if let Some(this) = this.upgrade(&cx) {
320 if status.is_connected() {
321 this.update(&mut cx, |this, cx| this.register(cx)).await?;
322 } else {
323 this.update(&mut cx, |this, cx| this.unregister(cx));
324 }
325 }
326 }
327 Ok(())
328 }
329 .log_err()
330 }
331 });
332
333 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
334 Self {
335 worktrees: Default::default(),
336 collaborators: Default::default(),
337 opened_buffers: Default::default(),
338 shared_buffers: Default::default(),
339 loading_buffers: Default::default(),
340 loading_local_worktrees: Default::default(),
341 buffer_snapshots: Default::default(),
342 client_state: ProjectClientState::Local {
343 is_shared: false,
344 remote_id_tx,
345 remote_id_rx,
346 _maintain_remote_id_task,
347 },
348 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
349 subscriptions: Vec::new(),
350 active_entry: None,
351 languages,
352 client,
353 user_store,
354 fs,
355 next_entry_id: Default::default(),
356 language_servers: Default::default(),
357 started_language_servers: Default::default(),
358 language_server_statuses: Default::default(),
359 last_workspace_edits_by_language_server: Default::default(),
360 language_server_settings: Default::default(),
361 next_language_server_id: 0,
362 nonce: StdRng::from_entropy().gen(),
363 }
364 })
365 }
366
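    /// Joins a project hosted by another peer. The host may accept or decline the
    /// request; on acceptance the guest receives the project's worktrees,
    /// collaborators, and running language servers, and a background task watches
    /// the connection so the project can be marked read-only if sharing stops.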
367 pub async fn remote(
368 remote_id: u64,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 languages: Arc<LanguageRegistry>,
372 fs: Arc<dyn Fs>,
373 cx: &mut AsyncAppContext,
374 ) -> Result<ModelHandle<Self>, JoinProjectError> {
375 client.authenticate_and_connect(true, &cx).await?;
376
377 let response = client
378 .request(proto::JoinProject {
379 project_id: remote_id,
380 })
381 .await?;
382
383 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
384 proto::join_project_response::Variant::Accept(response) => response,
385 proto::join_project_response::Variant::Decline(decline) => {
386 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
387 Some(proto::join_project_response::decline::Reason::Declined) => {
388 Err(JoinProjectError::HostDeclined)?
389 }
390 Some(proto::join_project_response::decline::Reason::Closed) => {
391 Err(JoinProjectError::HostClosedProject)?
392 }
393 Some(proto::join_project_response::decline::Reason::WentOffline) => {
394 Err(JoinProjectError::HostWentOffline)?
395 }
396 None => Err(anyhow!("missing decline reason"))?,
397 }
398 }
399 };
400
401 let replica_id = response.replica_id as ReplicaId;
402
403 let mut worktrees = Vec::new();
404 for worktree in response.worktrees {
405 let (worktree, load_task) = cx
406 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
407 worktrees.push(worktree);
408 load_task.detach();
409 }
410
411 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
412 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
413 let mut this = Self {
414 worktrees: Vec::new(),
415 loading_buffers: Default::default(),
416 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
417 shared_buffers: Default::default(),
418 loading_local_worktrees: Default::default(),
419 active_entry: None,
420 collaborators: Default::default(),
421 languages,
422 user_store: user_store.clone(),
423 fs,
424 next_entry_id: Default::default(),
425 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
426 client: client.clone(),
427 client_state: ProjectClientState::Remote {
428 sharing_has_stopped: false,
429 remote_id,
430 replica_id,
431 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
432 async move {
433 let mut status = client.status();
434 let is_connected =
435 status.next().await.map_or(false, |s| s.is_connected());
436 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
437 if !is_connected || status.next().await.is_some() {
438 if let Some(this) = this.upgrade(&cx) {
439 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
440 }
441 }
442 Ok(())
443 }
444 .log_err()
445 }),
446 },
447 language_servers: Default::default(),
448 started_language_servers: Default::default(),
449 language_server_settings: Default::default(),
450 language_server_statuses: response
451 .language_servers
452 .into_iter()
453 .map(|server| {
454 (
455 server.id as usize,
456 LanguageServerStatus {
457 name: server.name,
458 pending_work: Default::default(),
459 pending_diagnostic_updates: 0,
460 },
461 )
462 })
463 .collect(),
464 last_workspace_edits_by_language_server: Default::default(),
465 next_language_server_id: 0,
466 opened_buffers: Default::default(),
467 buffer_snapshots: Default::default(),
468 nonce: StdRng::from_entropy().gen(),
469 };
470 for worktree in worktrees {
471 this.add_worktree(&worktree, cx);
472 }
473 this
474 });
475
476 let user_ids = response
477 .collaborators
478 .iter()
479 .map(|peer| peer.user_id)
480 .collect();
481 user_store
482 .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
483 .await?;
484 let mut collaborators = HashMap::default();
485 for message in response.collaborators {
486 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
487 collaborators.insert(collaborator.peer_id, collaborator);
488 }
489
490 this.update(cx, |this, _| {
491 this.collaborators = collaborators;
492 });
493
494 Ok(this)
495 }
496
497 #[cfg(any(test, feature = "test-support"))]
498 pub async fn test(
499 fs: Arc<dyn Fs>,
500 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
501 cx: &mut gpui::TestAppContext,
502 ) -> ModelHandle<Project> {
503 let languages = Arc::new(LanguageRegistry::test());
504 let http_client = client::test::FakeHttpClient::with_404_response();
505 let client = client::Client::new(http_client.clone());
506 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
507 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
508 for path in root_paths {
509 let (tree, _) = project
510 .update(cx, |project, cx| {
511 project.find_or_create_local_worktree(path, true, cx)
512 })
513 .await
514 .unwrap();
515 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
516 .await;
517 }
518 project
519 }
520
521 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
522 self.opened_buffers
523 .get(&remote_id)
524 .and_then(|buffer| buffer.upgrade(cx))
525 }
526
527 pub fn languages(&self) -> &Arc<LanguageRegistry> {
528 &self.languages
529 }
530
531 #[cfg(any(test, feature = "test-support"))]
532 pub fn check_invariants(&self, cx: &AppContext) {
533 if self.is_local() {
534 let mut worktree_root_paths = HashMap::default();
535 for worktree in self.worktrees(cx) {
536 let worktree = worktree.read(cx);
537 let abs_path = worktree.as_local().unwrap().abs_path().clone();
538 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
539 assert_eq!(
540 prev_worktree_id,
541 None,
542 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
543 abs_path,
544 worktree.id(),
545 prev_worktree_id
546 )
547 }
548 } else {
549 let replica_id = self.replica_id();
550 for buffer in self.opened_buffers.values() {
551 if let Some(buffer) = buffer.upgrade(cx) {
552 let buffer = buffer.read(cx);
553 assert_eq!(
554 buffer.deferred_ops_len(),
555 0,
556 "replica {}, buffer {} has deferred operations",
557 replica_id,
558 buffer.remote_id()
559 );
560 }
561 }
562 }
563 }
564
565 #[cfg(any(test, feature = "test-support"))]
566 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
567 let path = path.into();
568 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
569 self.opened_buffers.iter().any(|(_, buffer)| {
570 if let Some(buffer) = buffer.upgrade(cx) {
571 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
572 if file.worktree == worktree && file.path() == &path.path {
573 return true;
574 }
575 }
576 }
577 false
578 })
579 } else {
580 false
581 }
582 }
583
584 pub fn fs(&self) -> &Arc<dyn Fs> {
585 &self.fs
586 }
587
588 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
589 self.unshared(cx);
590 for worktree in &self.worktrees {
591 if let Some(worktree) = worktree.upgrade(cx) {
592 worktree.update(cx, |worktree, _| {
593 worktree.as_local_mut().unwrap().unregister();
594 });
595 }
596 }
597
598 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
599 *remote_id_tx.borrow_mut() = None;
600 }
601
602 self.subscriptions.clear();
603 }
604
605 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
606 self.unregister(cx);
607
608 let response = self.client.request(proto::RegisterProject {});
609 cx.spawn(|this, mut cx| async move {
610 let remote_id = response.await?.project_id;
611
612 let mut registrations = Vec::new();
613 this.update(&mut cx, |this, cx| {
614 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
615 *remote_id_tx.borrow_mut() = Some(remote_id);
616 }
617
618 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
619
620 this.subscriptions
621 .push(this.client.add_model_for_remote_entity(remote_id, cx));
622
623 for worktree in &this.worktrees {
624 if let Some(worktree) = worktree.upgrade(cx) {
625 registrations.push(worktree.update(cx, |worktree, cx| {
626 let worktree = worktree.as_local_mut().unwrap();
627 worktree.register(remote_id, cx)
628 }));
629 }
630 }
631 });
632
633 futures::future::try_join_all(registrations).await?;
634 Ok(())
635 })
636 }
637
638 pub fn remote_id(&self) -> Option<u64> {
639 match &self.client_state {
640 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
641 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
642 }
643 }
644
645 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
646 let mut id = None;
647 let mut watch = None;
648 match &self.client_state {
649 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
650 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
651 }
652
653 async move {
654 if let Some(id) = id {
655 return id;
656 }
657 let mut watch = watch.unwrap();
658 loop {
659 let id = *watch.borrow();
660 if let Some(id) = id {
661 return id;
662 }
663 watch.next().await;
664 }
665 }
666 }
667
668 pub fn replica_id(&self) -> ReplicaId {
669 match &self.client_state {
670 ProjectClientState::Local { .. } => 0,
671 ProjectClientState::Remote { replica_id, .. } => *replica_id,
672 }
673 }
674
675 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
676 &self.collaborators
677 }
678
679 pub fn worktrees<'a>(
680 &'a self,
681 cx: &'a AppContext,
682 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
683 self.worktrees
684 .iter()
685 .filter_map(move |worktree| worktree.upgrade(cx))
686 }
687
688 pub fn visible_worktrees<'a>(
689 &'a self,
690 cx: &'a AppContext,
691 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
692 self.worktrees.iter().filter_map(|worktree| {
693 worktree.upgrade(cx).and_then(|worktree| {
694 if worktree.read(cx).is_visible() {
695 Some(worktree)
696 } else {
697 None
698 }
699 })
700 })
701 }
702
703 pub fn worktree_for_id(
704 &self,
705 id: WorktreeId,
706 cx: &AppContext,
707 ) -> Option<ModelHandle<Worktree>> {
708 self.worktrees(cx)
709 .find(|worktree| worktree.read(cx).id() == id)
710 }
711
712 pub fn worktree_for_entry(
713 &self,
714 entry_id: ProjectEntryId,
715 cx: &AppContext,
716 ) -> Option<ModelHandle<Worktree>> {
717 self.worktrees(cx)
718 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
719 }
720
721 pub fn worktree_id_for_entry(
722 &self,
723 entry_id: ProjectEntryId,
724 cx: &AppContext,
725 ) -> Option<WorktreeId> {
726 self.worktree_for_entry(entry_id, cx)
727 .map(|worktree| worktree.read(cx).id())
728 }
729
730 pub fn create_entry(
731 &mut self,
732 project_path: impl Into<ProjectPath>,
733 is_directory: bool,
734 cx: &mut ModelContext<Self>,
735 ) -> Option<Task<Result<Entry>>> {
736 let project_path = project_path.into();
737 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
738 if self.is_local() {
739 Some(worktree.update(cx, |worktree, cx| {
740 worktree
741 .as_local_mut()
742 .unwrap()
743 .create_entry(project_path.path, is_directory, cx)
744 }))
745 } else {
746 let client = self.client.clone();
747 let project_id = self.remote_id().unwrap();
748 Some(cx.spawn_weak(|_, mut cx| async move {
749 let response = client
750 .request(proto::CreateProjectEntry {
751 worktree_id: project_path.worktree_id.to_proto(),
752 project_id,
753 path: project_path.path.as_os_str().as_bytes().to_vec(),
754 is_directory,
755 })
756 .await?;
757 let entry = response
758 .entry
759 .ok_or_else(|| anyhow!("missing entry in response"))?;
760 worktree
761 .update(&mut cx, |worktree, cx| {
762 worktree.as_remote().unwrap().insert_entry(
763 entry,
764 response.worktree_scan_id as usize,
765 cx,
766 )
767 })
768 .await
769 }))
770 }
771 }
772
773 pub fn rename_entry(
774 &mut self,
775 entry_id: ProjectEntryId,
776 new_path: impl Into<Arc<Path>>,
777 cx: &mut ModelContext<Self>,
778 ) -> Option<Task<Result<Entry>>> {
779 let worktree = self.worktree_for_entry(entry_id, cx)?;
780 let new_path = new_path.into();
781 if self.is_local() {
782 worktree.update(cx, |worktree, cx| {
783 worktree
784 .as_local_mut()
785 .unwrap()
786 .rename_entry(entry_id, new_path, cx)
787 })
788 } else {
789 let client = self.client.clone();
790 let project_id = self.remote_id().unwrap();
791
792 Some(cx.spawn_weak(|_, mut cx| async move {
793 let response = client
794 .request(proto::RenameProjectEntry {
795 project_id,
796 entry_id: entry_id.to_proto(),
797 new_path: new_path.as_os_str().as_bytes().to_vec(),
798 })
799 .await?;
800 let entry = response
801 .entry
802 .ok_or_else(|| anyhow!("missing entry in response"))?;
803 worktree
804 .update(&mut cx, |worktree, cx| {
805 worktree.as_remote().unwrap().insert_entry(
806 entry,
807 response.worktree_scan_id as usize,
808 cx,
809 )
810 })
811 .await
812 }))
813 }
814 }
815
816 pub fn delete_entry(
817 &mut self,
818 entry_id: ProjectEntryId,
819 cx: &mut ModelContext<Self>,
820 ) -> Option<Task<Result<()>>> {
821 let worktree = self.worktree_for_entry(entry_id, cx)?;
822 if self.is_local() {
823 worktree.update(cx, |worktree, cx| {
824 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
825 })
826 } else {
827 let client = self.client.clone();
828 let project_id = self.remote_id().unwrap();
829 Some(cx.spawn_weak(|_, mut cx| async move {
830 let response = client
831 .request(proto::DeleteProjectEntry {
832 project_id,
833 entry_id: entry_id.to_proto(),
834 })
835 .await?;
836 worktree
837 .update(&mut cx, move |worktree, cx| {
838 worktree.as_remote().unwrap().delete_entry(
839 entry_id,
840 response.worktree_scan_id as usize,
841 cx,
842 )
843 })
844 .await
845 }))
846 }
847 }
848
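    /// Shares this local project with collaborators: buffer and worktree handles are
    /// upgraded to strong references so they stay alive for guests, and each worktree
    /// is shared under the project's remote id.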
849 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
850 let project_id;
851 if let ProjectClientState::Local {
852 remote_id_rx,
853 is_shared,
854 ..
855 } = &mut self.client_state
856 {
857 if *is_shared {
858 return Task::ready(Ok(()));
859 }
860 *is_shared = true;
861 if let Some(id) = *remote_id_rx.borrow() {
862 project_id = id;
863 } else {
864 return Task::ready(Err(anyhow!("project hasn't been registered")));
865 }
866 } else {
867 return Task::ready(Err(anyhow!("can't share a remote project")));
868 };
869
870 for open_buffer in self.opened_buffers.values_mut() {
871 match open_buffer {
872 OpenBuffer::Strong(_) => {}
873 OpenBuffer::Weak(buffer) => {
874 if let Some(buffer) = buffer.upgrade(cx) {
875 *open_buffer = OpenBuffer::Strong(buffer);
876 }
877 }
878 OpenBuffer::Loading(_) => unreachable!(),
879 }
880 }
881
882 for worktree_handle in self.worktrees.iter_mut() {
883 match worktree_handle {
884 WorktreeHandle::Strong(_) => {}
885 WorktreeHandle::Weak(worktree) => {
886 if let Some(worktree) = worktree.upgrade(cx) {
887 *worktree_handle = WorktreeHandle::Strong(worktree);
888 }
889 }
890 }
891 }
892
893 let mut tasks = Vec::new();
894 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
895 worktree.update(cx, |worktree, cx| {
896 let worktree = worktree.as_local_mut().unwrap();
897 tasks.push(worktree.share(project_id, cx));
898 });
899 }
900
901 cx.spawn(|this, mut cx| async move {
902 for task in tasks {
903 task.await?;
904 }
905 this.update(&mut cx, |_, cx| cx.notify());
906 Ok(())
907 })
908 }
909
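    /// Stops sharing this local project: collaborator and shared-buffer state is
    /// cleared, and worktree and buffer handles are downgraded back to weak
    /// references where possible.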
910 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
911 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
912 if !*is_shared {
913 return;
914 }
915
916 *is_shared = false;
917 self.collaborators.clear();
918 self.shared_buffers.clear();
919 for worktree_handle in self.worktrees.iter_mut() {
920 if let WorktreeHandle::Strong(worktree) = worktree_handle {
921 let is_visible = worktree.update(cx, |worktree, _| {
922 worktree.as_local_mut().unwrap().unshare();
923 worktree.is_visible()
924 });
925 if !is_visible {
926 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
927 }
928 }
929 }
930
931 for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
938 }
939
940 cx.notify();
941 } else {
942 log::error!("attempted to unshare a remote project");
943 }
944 }
945
946 pub fn respond_to_join_request(
947 &mut self,
948 requester_id: u64,
949 allow: bool,
950 cx: &mut ModelContext<Self>,
951 ) {
952 if let Some(project_id) = self.remote_id() {
953 let share = self.share(cx);
954 let client = self.client.clone();
955 cx.foreground()
956 .spawn(async move {
957 share.await?;
958 client.send(proto::RespondToJoinProjectRequest {
959 requester_id,
960 project_id,
961 allow,
962 })
963 })
964 .detach_and_log_err(cx);
965 }
966 }
967
968 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
969 if let ProjectClientState::Remote {
970 sharing_has_stopped,
971 ..
972 } = &mut self.client_state
973 {
974 *sharing_has_stopped = true;
975 self.collaborators.clear();
976 cx.notify();
977 }
978 }
979
980 pub fn is_read_only(&self) -> bool {
981 match &self.client_state {
982 ProjectClientState::Local { .. } => false,
983 ProjectClientState::Remote {
984 sharing_has_stopped,
985 ..
986 } => *sharing_has_stopped,
987 }
988 }
989
990 pub fn is_local(&self) -> bool {
991 match &self.client_state {
992 ProjectClientState::Local { .. } => true,
993 ProjectClientState::Remote { .. } => false,
994 }
995 }
996
997 pub fn is_remote(&self) -> bool {
998 !self.is_local()
999 }
1000
1001 pub fn create_buffer(
1002 &mut self,
1003 text: &str,
1004 language: Option<Arc<Language>>,
1005 cx: &mut ModelContext<Self>,
1006 ) -> Result<ModelHandle<Buffer>> {
1007 if self.is_remote() {
1008 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1009 }
1010
1011 let buffer = cx.add_model(|cx| {
1012 Buffer::new(self.replica_id(), text, cx)
1013 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1014 });
1015 self.register_buffer(&buffer, cx)?;
1016 Ok(buffer)
1017 }
1018
1019 pub fn open_path(
1020 &mut self,
1021 path: impl Into<ProjectPath>,
1022 cx: &mut ModelContext<Self>,
1023 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1024 let task = self.open_buffer(path, cx);
1025 cx.spawn_weak(|_, cx| async move {
1026 let buffer = task.await?;
1027 let project_entry_id = buffer
1028 .read_with(&cx, |buffer, cx| {
1029 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1030 })
1031 .ok_or_else(|| anyhow!("no project entry"))?;
1032 Ok((project_entry_id, buffer.into()))
1033 })
1034 }
1035
1036 pub fn open_local_buffer(
1037 &mut self,
1038 abs_path: impl AsRef<Path>,
1039 cx: &mut ModelContext<Self>,
1040 ) -> Task<Result<ModelHandle<Buffer>>> {
1041 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1042 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1043 } else {
1044 Task::ready(Err(anyhow!("no such path")))
1045 }
1046 }
1047
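    /// Opens the buffer for the given project path, reusing an already-open buffer
    /// or an in-flight load for the same path when one exists. Local worktrees load
    /// the file from disk; remote worktrees request the buffer from the host over
    /// RPC.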
1048 pub fn open_buffer(
1049 &mut self,
1050 path: impl Into<ProjectPath>,
1051 cx: &mut ModelContext<Self>,
1052 ) -> Task<Result<ModelHandle<Buffer>>> {
1053 let project_path = path.into();
1054 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1055 worktree
1056 } else {
1057 return Task::ready(Err(anyhow!("no such worktree")));
1058 };
1059
1060 // If there is already a buffer for the given path, then return it.
1061 let existing_buffer = self.get_open_buffer(&project_path, cx);
1062 if let Some(existing_buffer) = existing_buffer {
1063 return Task::ready(Ok(existing_buffer));
1064 }
1065
1066 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1067 // If the given path is already being loaded, then wait for that existing
1068 // task to complete and return the same buffer.
1069 hash_map::Entry::Occupied(e) => e.get().clone(),
1070
1071 // Otherwise, record the fact that this path is now being loaded.
1072 hash_map::Entry::Vacant(entry) => {
1073 let (mut tx, rx) = postage::watch::channel();
1074 entry.insert(rx.clone());
1075
1076 let load_buffer = if worktree.read(cx).is_local() {
1077 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1078 } else {
1079 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1080 };
1081
1082 cx.spawn(move |this, mut cx| async move {
1083 let load_result = load_buffer.await;
1084 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1085 // Record the fact that the buffer is no longer loading.
1086 this.loading_buffers.remove(&project_path);
1087 let buffer = load_result.map_err(Arc::new)?;
1088 Ok(buffer)
1089 }));
1090 })
1091 .detach();
1092 rx
1093 }
1094 };
1095
1096 cx.foreground().spawn(async move {
1097 loop {
1098 if let Some(result) = loading_watch.borrow().as_ref() {
1099 match result {
1100 Ok(buffer) => return Ok(buffer.clone()),
1101 Err(error) => return Err(anyhow!("{}", error)),
1102 }
1103 }
1104 loading_watch.next().await;
1105 }
1106 })
1107 }
1108
1109 fn open_local_buffer_internal(
1110 &mut self,
1111 path: &Arc<Path>,
1112 worktree: &ModelHandle<Worktree>,
1113 cx: &mut ModelContext<Self>,
1114 ) -> Task<Result<ModelHandle<Buffer>>> {
1115 let load_buffer = worktree.update(cx, |worktree, cx| {
1116 let worktree = worktree.as_local_mut().unwrap();
1117 worktree.load_buffer(path, cx)
1118 });
1119 cx.spawn(|this, mut cx| async move {
1120 let buffer = load_buffer.await?;
1121 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1122 Ok(buffer)
1123 })
1124 }
1125
1126 fn open_remote_buffer_internal(
1127 &mut self,
1128 path: &Arc<Path>,
1129 worktree: &ModelHandle<Worktree>,
1130 cx: &mut ModelContext<Self>,
1131 ) -> Task<Result<ModelHandle<Buffer>>> {
1132 let rpc = self.client.clone();
1133 let project_id = self.remote_id().unwrap();
1134 let remote_worktree_id = worktree.read(cx).id();
1135 let path = path.clone();
1136 let path_string = path.to_string_lossy().to_string();
1137 cx.spawn(|this, mut cx| async move {
1138 let response = rpc
1139 .request(proto::OpenBufferByPath {
1140 project_id,
1141 worktree_id: remote_worktree_id.to_proto(),
1142 path: path_string,
1143 })
1144 .await?;
1145 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1146 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1147 .await
1148 })
1149 }
1150
1151 fn open_local_buffer_via_lsp(
1152 &mut self,
1153 abs_path: lsp::Url,
1154 lsp_adapter: Arc<dyn LspAdapter>,
1155 lsp_server: Arc<LanguageServer>,
1156 cx: &mut ModelContext<Self>,
1157 ) -> Task<Result<ModelHandle<Buffer>>> {
1158 cx.spawn(|this, mut cx| async move {
1159 let abs_path = abs_path
1160 .to_file_path()
1161 .map_err(|_| anyhow!("can't convert URI to path"))?;
1162 let (worktree, relative_path) = if let Some(result) =
1163 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1164 {
1165 result
1166 } else {
1167 let worktree = this
1168 .update(&mut cx, |this, cx| {
1169 this.create_local_worktree(&abs_path, false, cx)
1170 })
1171 .await?;
1172 this.update(&mut cx, |this, cx| {
1173 this.language_servers.insert(
1174 (worktree.read(cx).id(), lsp_adapter.name()),
1175 (lsp_adapter, lsp_server),
1176 );
1177 });
1178 (worktree, PathBuf::new())
1179 };
1180
1181 let project_path = ProjectPath {
1182 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1183 path: relative_path.into(),
1184 };
1185 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1186 .await
1187 })
1188 }
1189
1190 pub fn open_buffer_by_id(
1191 &mut self,
1192 id: u64,
1193 cx: &mut ModelContext<Self>,
1194 ) -> Task<Result<ModelHandle<Buffer>>> {
1195 if let Some(buffer) = self.buffer_for_id(id, cx) {
1196 Task::ready(Ok(buffer))
1197 } else if self.is_local() {
1198 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1199 } else if let Some(project_id) = self.remote_id() {
1200 let request = self
1201 .client
1202 .request(proto::OpenBufferById { project_id, id });
1203 cx.spawn(|this, mut cx| async move {
1204 let buffer = request
1205 .await?
1206 .buffer
1207 .ok_or_else(|| anyhow!("invalid buffer"))?;
1208 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1209 .await
1210 })
1211 } else {
1212 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1213 }
1214 }
1215
1216 pub fn save_buffer_as(
1217 &mut self,
1218 buffer: ModelHandle<Buffer>,
1219 abs_path: PathBuf,
1220 cx: &mut ModelContext<Project>,
1221 ) -> Task<Result<()>> {
1222 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1223 let old_path =
1224 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1225 cx.spawn(|this, mut cx| async move {
1226 if let Some(old_path) = old_path {
1227 this.update(&mut cx, |this, cx| {
1228 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1229 });
1230 }
1231 let (worktree, path) = worktree_task.await?;
1232 worktree
1233 .update(&mut cx, |worktree, cx| {
1234 worktree
1235 .as_local_mut()
1236 .unwrap()
1237 .save_buffer_as(buffer.clone(), path, cx)
1238 })
1239 .await?;
1240 this.update(&mut cx, |this, cx| {
1241 this.assign_language_to_buffer(&buffer, cx);
1242 this.register_buffer_with_language_server(&buffer, cx);
1243 });
1244 Ok(())
1245 })
1246 }
1247
1248 pub fn get_open_buffer(
1249 &mut self,
1250 path: &ProjectPath,
1251 cx: &mut ModelContext<Self>,
1252 ) -> Option<ModelHandle<Buffer>> {
1253 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1254 self.opened_buffers.values().find_map(|buffer| {
1255 let buffer = buffer.upgrade(cx)?;
1256 let file = File::from_dyn(buffer.read(cx).file())?;
1257 if file.worktree == worktree && file.path() == &path.path {
1258 Some(buffer)
1259 } else {
1260 None
1261 }
1262 })
1263 }
1264
1265 fn register_buffer(
1266 &mut self,
1267 buffer: &ModelHandle<Buffer>,
1268 cx: &mut ModelContext<Self>,
1269 ) -> Result<()> {
1270 let remote_id = buffer.read(cx).remote_id();
1271 let open_buffer = if self.is_remote() || self.is_shared() {
1272 OpenBuffer::Strong(buffer.clone())
1273 } else {
1274 OpenBuffer::Weak(buffer.downgrade())
1275 };
1276
1277 match self.opened_buffers.insert(remote_id, open_buffer) {
1278 None => {}
1279 Some(OpenBuffer::Loading(operations)) => {
1280 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1281 }
1282 Some(OpenBuffer::Weak(existing_handle)) => {
1283 if existing_handle.upgrade(cx).is_some() {
1284 Err(anyhow!(
1285 "already registered buffer with remote id {}",
1286 remote_id
1287 ))?
1288 }
1289 }
1290 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1291 "already registered buffer with remote id {}",
1292 remote_id
1293 ))?,
1294 }
1295 cx.subscribe(buffer, |this, buffer, event, cx| {
1296 this.on_buffer_event(buffer, event, cx);
1297 })
1298 .detach();
1299
1300 self.assign_language_to_buffer(buffer, cx);
1301 self.register_buffer_with_language_server(buffer, cx);
1302 cx.observe_release(buffer, |this, buffer, cx| {
1303 if let Some(file) = File::from_dyn(buffer.file()) {
1304 if file.is_local() {
1305 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1306 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1307 server
1308 .notify::<lsp::notification::DidCloseTextDocument>(
1309 lsp::DidCloseTextDocumentParams {
1310 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1311 },
1312 )
1313 .log_err();
1314 }
1315 }
1316 }
1317 })
1318 .detach();
1319
1320 Ok(())
1321 }
1322
1323 fn register_buffer_with_language_server(
1324 &mut self,
1325 buffer_handle: &ModelHandle<Buffer>,
1326 cx: &mut ModelContext<Self>,
1327 ) {
1328 let buffer = buffer_handle.read(cx);
1329 let buffer_id = buffer.remote_id();
1330 if let Some(file) = File::from_dyn(buffer.file()) {
1331 if file.is_local() {
1332 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1333 let initial_snapshot = buffer.text_snapshot();
1334
1335 let mut language_server = None;
1336 let mut language_id = None;
1337 if let Some(language) = buffer.language() {
1338 let worktree_id = file.worktree_id(cx);
1339 if let Some(adapter) = language.lsp_adapter() {
1340 language_id = adapter.id_for_language(language.name().as_ref());
1341 language_server = self
1342 .language_servers
1343 .get(&(worktree_id, adapter.name()))
1344 .cloned();
1345 }
1346 }
1347
1348 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1349 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1350 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1351 .log_err();
1352 }
1353 }
1354
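                // If a language server is running for this buffer's language in this
                // worktree, open the document with it at version 0 and remember the
                // snapshot so later edits can be sent as incremental changes.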
1355 if let Some((_, server)) = language_server {
1356 server
1357 .notify::<lsp::notification::DidOpenTextDocument>(
1358 lsp::DidOpenTextDocumentParams {
1359 text_document: lsp::TextDocumentItem::new(
1360 uri,
1361 language_id.unwrap_or_default(),
1362 0,
1363 initial_snapshot.text(),
1364 ),
                            },
1367 )
1368 .log_err();
1369 buffer_handle.update(cx, |buffer, cx| {
1370 buffer.set_completion_triggers(
1371 server
1372 .capabilities()
1373 .completion_provider
1374 .as_ref()
1375 .and_then(|provider| provider.trigger_characters.clone())
1376 .unwrap_or(Vec::new()),
1377 cx,
1378 )
1379 });
1380 self.buffer_snapshots
1381 .insert(buffer_id, vec![(0, initial_snapshot)]);
1382 }
1383 }
1384 }
1385 }
1386
1387 fn unregister_buffer_from_language_server(
1388 &mut self,
1389 buffer: &ModelHandle<Buffer>,
1390 old_path: PathBuf,
1391 cx: &mut ModelContext<Self>,
1392 ) {
1393 buffer.update(cx, |buffer, cx| {
1394 buffer.update_diagnostics(Default::default(), cx);
1395 self.buffer_snapshots.remove(&buffer.remote_id());
1396 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1397 language_server
1398 .notify::<lsp::notification::DidCloseTextDocument>(
1399 lsp::DidCloseTextDocumentParams {
1400 text_document: lsp::TextDocumentIdentifier::new(
1401 lsp::Url::from_file_path(old_path).unwrap(),
1402 ),
1403 },
1404 )
1405 .log_err();
1406 }
1407 });
1408 }
1409
1410 fn on_buffer_event(
1411 &mut self,
1412 buffer: ModelHandle<Buffer>,
1413 event: &BufferEvent,
1414 cx: &mut ModelContext<Self>,
1415 ) -> Option<()> {
1416 match event {
1417 BufferEvent::Operation(operation) => {
1418 let project_id = self.remote_id()?;
1419 let request = self.client.request(proto::UpdateBuffer {
1420 project_id,
1421 buffer_id: buffer.read(cx).remote_id(),
1422 operations: vec![language::proto::serialize_operation(&operation)],
1423 });
1424 cx.background().spawn(request).detach_and_log_err(cx);
1425 }
1426 BufferEvent::Edited { .. } => {
1427 let (_, language_server) = self
1428 .language_server_for_buffer(buffer.read(cx), cx)?
1429 .clone();
1430 let buffer = buffer.read(cx);
1431 let file = File::from_dyn(buffer.file())?;
1432 let abs_path = file.as_local()?.abs_path(cx);
1433 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1434 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1435 let (version, prev_snapshot) = buffer_snapshots.last()?;
1436 let next_snapshot = buffer.text_snapshot();
1437 let next_version = version + 1;
1438
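                // Convert the buffer's edits since the last snapshot sent to the server
                // into LSP incremental content changes, expressed in the previous
                // snapshot's coordinates, and bump the reported document version.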
1439 let content_changes = buffer
1440 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1441 .map(|edit| {
1442 let edit_start = edit.new.start.0;
1443 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1444 let new_text = next_snapshot
1445 .text_for_range(edit.new.start.1..edit.new.end.1)
1446 .collect();
1447 lsp::TextDocumentContentChangeEvent {
1448 range: Some(lsp::Range::new(
1449 point_to_lsp(edit_start),
1450 point_to_lsp(edit_end),
1451 )),
1452 range_length: None,
1453 text: new_text,
1454 }
1455 })
1456 .collect();
1457
1458 buffer_snapshots.push((next_version, next_snapshot));
1459
1460 language_server
1461 .notify::<lsp::notification::DidChangeTextDocument>(
1462 lsp::DidChangeTextDocumentParams {
1463 text_document: lsp::VersionedTextDocumentIdentifier::new(
1464 uri,
1465 next_version,
1466 ),
1467 content_changes,
1468 },
1469 )
1470 .log_err();
1471 }
1472 BufferEvent::Saved => {
1473 let file = File::from_dyn(buffer.read(cx).file())?;
1474 let worktree_id = file.worktree_id(cx);
1475 let abs_path = file.as_local()?.abs_path(cx);
1476 let text_document = lsp::TextDocumentIdentifier {
1477 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1478 };
1479
1480 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1481 server
1482 .notify::<lsp::notification::DidSaveTextDocument>(
1483 lsp::DidSaveTextDocumentParams {
1484 text_document: text_document.clone(),
1485 text: None,
1486 },
1487 )
1488 .log_err();
1489 }
1490 }
1491 _ => {}
1492 }
1493
1494 None
1495 }
1496
1497 fn language_servers_for_worktree(
1498 &self,
1499 worktree_id: WorktreeId,
1500 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1501 self.language_servers.iter().filter_map(
1502 move |((language_server_worktree_id, _), server)| {
1503 if *language_server_worktree_id == worktree_id {
1504 Some(server)
1505 } else {
1506 None
1507 }
1508 },
1509 )
1510 }
1511
1512 fn assign_language_to_buffer(
1513 &mut self,
1514 buffer: &ModelHandle<Buffer>,
1515 cx: &mut ModelContext<Self>,
1516 ) -> Option<()> {
1517 // If the buffer has a language, set it and start the language server if we haven't already.
1518 let full_path = buffer.read(cx).file()?.full_path(cx);
1519 let language = self.languages.select_language(&full_path)?;
1520 buffer.update(cx, |buffer, cx| {
1521 buffer.set_language(Some(language.clone()), cx);
1522 });
1523
1524 let file = File::from_dyn(buffer.read(cx).file())?;
1525 let worktree = file.worktree.read(cx).as_local()?;
1526 let worktree_id = worktree.id();
1527 let worktree_abs_path = worktree.abs_path().clone();
1528 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1529
1530 None
1531 }
1532
1533 fn start_language_server(
1534 &mut self,
1535 worktree_id: WorktreeId,
1536 worktree_path: Arc<Path>,
1537 language: Arc<Language>,
1538 cx: &mut ModelContext<Self>,
1539 ) {
1540 let adapter = if let Some(adapter) = language.lsp_adapter() {
1541 adapter
1542 } else {
1543 return;
1544 };
1545 let key = (worktree_id, adapter.name());
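        // Only one language server is started per (worktree, adapter) pair; concurrent
        // requests for the same key share the pending startup task.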
1546 self.started_language_servers
1547 .entry(key.clone())
1548 .or_insert_with(|| {
1549 let server_id = post_inc(&mut self.next_language_server_id);
1550 let language_server = self.languages.start_language_server(
1551 server_id,
1552 language.clone(),
1553 worktree_path,
1554 self.client.http_client(),
1555 cx,
1556 );
1557 cx.spawn_weak(|this, mut cx| async move {
1558 let language_server = language_server?.await.log_err()?;
1559 let language_server = language_server
1560 .initialize(adapter.initialization_options())
1561 .await
1562 .log_err()?;
1563 let this = this.upgrade(&cx)?;
1564 let disk_based_diagnostics_progress_token =
1565 adapter.disk_based_diagnostics_progress_token();
1566
1567 language_server
1568 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1569 let this = this.downgrade();
1570 let adapter = adapter.clone();
1571 move |params, mut cx| {
1572 if let Some(this) = this.upgrade(&cx) {
1573 this.update(&mut cx, |this, cx| {
1574 this.on_lsp_diagnostics_published(
1575 server_id,
1576 params,
1577 &adapter,
1578 disk_based_diagnostics_progress_token,
1579 cx,
1580 );
1581 });
1582 }
1583 }
1584 })
1585 .detach();
1586
1587 language_server
1588 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1589 let settings = this
1590 .read_with(&cx, |this, _| this.language_server_settings.clone());
1591 move |params, _| {
1592 let settings = settings.lock().clone();
1593 async move {
1594 Ok(params
1595 .items
1596 .into_iter()
1597 .map(|item| {
1598 if let Some(section) = &item.section {
1599 settings
1600 .get(section)
1601 .cloned()
1602 .unwrap_or(serde_json::Value::Null)
1603 } else {
1604 settings.clone()
1605 }
1606 })
1607 .collect())
1608 }
1609 }
1610 })
1611 .detach();
1612
1613 language_server
1614 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1615 let this = this.downgrade();
1616 let adapter = adapter.clone();
1617 let language_server = language_server.clone();
1618 move |params, cx| {
1619 Self::on_lsp_workspace_edit(
1620 this,
1621 params,
1622 server_id,
1623 adapter.clone(),
1624 language_server.clone(),
1625 cx,
1626 )
1627 }
1628 })
1629 .detach();
1630
1631 language_server
1632 .on_notification::<lsp::notification::Progress, _>({
1633 let this = this.downgrade();
1634 move |params, mut cx| {
1635 if let Some(this) = this.upgrade(&cx) {
1636 this.update(&mut cx, |this, cx| {
1637 this.on_lsp_progress(
1638 params,
1639 server_id,
1640 disk_based_diagnostics_progress_token,
1641 cx,
1642 );
1643 });
1644 }
1645 }
1646 })
1647 .detach();
1648
1649 this.update(&mut cx, |this, cx| {
1650 this.language_servers
1651 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1652 this.language_server_statuses.insert(
1653 server_id,
1654 LanguageServerStatus {
1655 name: language_server.name().to_string(),
1656 pending_work: Default::default(),
1657 pending_diagnostic_updates: 0,
1658 },
1659 );
1660 language_server
1661 .notify::<lsp::notification::DidChangeConfiguration>(
1662 lsp::DidChangeConfigurationParams {
1663 settings: this.language_server_settings.lock().clone(),
1664 },
1665 )
1666 .ok();
1667
1668 if let Some(project_id) = this.remote_id() {
1669 this.client
1670 .send(proto::StartLanguageServer {
1671 project_id,
1672 server: Some(proto::LanguageServer {
1673 id: server_id as u64,
1674 name: language_server.name().to_string(),
1675 }),
1676 })
1677 .log_err();
1678 }
1679
1680 // Tell the language server about every open buffer in the worktree that matches the language.
1681 for buffer in this.opened_buffers.values() {
1682 if let Some(buffer_handle) = buffer.upgrade(cx) {
1683 let buffer = buffer_handle.read(cx);
1684 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1685 file
1686 } else {
1687 continue;
1688 };
1689 let language = if let Some(language) = buffer.language() {
1690 language
1691 } else {
1692 continue;
1693 };
1694 if file.worktree.read(cx).id() != key.0
1695 || language.lsp_adapter().map(|a| a.name())
1696 != Some(key.1.clone())
1697 {
1698 continue;
1699 }
1700
1701 let file = file.as_local()?;
1702 let versions = this
1703 .buffer_snapshots
1704 .entry(buffer.remote_id())
1705 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1706 let (version, initial_snapshot) = versions.last().unwrap();
1707 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1708 let language_id = adapter.id_for_language(language.name().as_ref());
1709 language_server
1710 .notify::<lsp::notification::DidOpenTextDocument>(
1711 lsp::DidOpenTextDocumentParams {
1712 text_document: lsp::TextDocumentItem::new(
1713 uri,
1714 language_id.unwrap_or_default(),
1715 *version,
1716 initial_snapshot.text(),
1717 ),
1718 },
1719 )
1720 .log_err()?;
1721 buffer_handle.update(cx, |buffer, cx| {
1722 buffer.set_completion_triggers(
1723 language_server
1724 .capabilities()
1725 .completion_provider
1726 .as_ref()
1727 .and_then(|provider| {
1728 provider.trigger_characters.clone()
1729 })
1730 .unwrap_or(Vec::new()),
1731 cx,
1732 )
1733 });
1734 }
1735 }
1736
1737 cx.notify();
1738 Some(())
1739 });
1740
1741 Some(language_server)
1742 })
1743 });
1744 }
1745
1746 pub fn restart_language_servers_for_buffers(
1747 &mut self,
1748 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1749 cx: &mut ModelContext<Self>,
1750 ) -> Option<()> {
1751 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1752 .into_iter()
1753 .filter_map(|buffer| {
1754 let file = File::from_dyn(buffer.read(cx).file())?;
1755 let worktree = file.worktree.read(cx).as_local()?;
1756 let worktree_id = worktree.id();
1757 let worktree_abs_path = worktree.abs_path().clone();
1758 let full_path = file.full_path(cx);
1759 Some((worktree_id, worktree_abs_path, full_path))
1760 })
1761 .collect();
1762 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1763 let language = self.languages.select_language(&full_path)?;
1764 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1765 }
1766
1767 None
1768 }
1769
1770 fn restart_language_server(
1771 &mut self,
1772 worktree_id: WorktreeId,
1773 worktree_path: Arc<Path>,
1774 language: Arc<Language>,
1775 cx: &mut ModelContext<Self>,
1776 ) {
1777 let adapter = if let Some(adapter) = language.lsp_adapter() {
1778 adapter
1779 } else {
1780 return;
1781 };
1782 let key = (worktree_id, adapter.name());
1783 let server_to_shutdown = self.language_servers.remove(&key);
1784 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1788 cx.spawn_weak(|this, mut cx| async move {
1789 if let Some(this) = this.upgrade(&cx) {
1790 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1791 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1792 shutdown_task.await;
1793 }
1794 }
1795
1796 this.update(&mut cx, |this, cx| {
1797 this.start_language_server(worktree_id, worktree_path, language, cx);
1798 });
1799 }
1800 })
1801 .detach();
1802 }
1803
1804 fn on_lsp_diagnostics_published(
1805 &mut self,
1806 server_id: usize,
1807 mut params: lsp::PublishDiagnosticsParams,
1808 adapter: &Arc<dyn LspAdapter>,
1809 disk_based_diagnostics_progress_token: Option<&str>,
1810 cx: &mut ModelContext<Self>,
1811 ) {
1812 adapter.process_diagnostics(&mut params);
1813 if disk_based_diagnostics_progress_token.is_none() {
1814 self.disk_based_diagnostics_started(cx);
1815 self.broadcast_language_server_update(
1816 server_id,
1817 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1818 proto::LspDiskBasedDiagnosticsUpdating {},
1819 ),
1820 );
1821 }
1822 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1823 .log_err();
1824 if disk_based_diagnostics_progress_token.is_none() {
1825 self.disk_based_diagnostics_finished(cx);
1826 self.broadcast_language_server_update(
1827 server_id,
1828 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1829 proto::LspDiskBasedDiagnosticsUpdated {},
1830 ),
1831 );
1832 }
1833 }
1834
1835 fn on_lsp_progress(
1836 &mut self,
1837 progress: lsp::ProgressParams,
1838 server_id: usize,
1839 disk_based_diagnostics_progress_token: Option<&str>,
1840 cx: &mut ModelContext<Self>,
1841 ) {
1842 let token = match progress.token {
1843 lsp::NumberOrString::String(token) => token,
1844 lsp::NumberOrString::Number(token) => {
1845 log::info!("skipping numeric progress token {}", token);
1846 return;
1847 }
1848 };
1849 let progress = match progress.value {
1850 lsp::ProgressParamsValue::WorkDone(value) => value,
1851 };
1852 let language_server_status =
1853 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1854 status
1855 } else {
1856 return;
1857 };
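        // Progress reported under the adapter's disk-based-diagnostics token drives the
        // project-wide "diagnostics updating" state; all other tokens are tracked as
        // per-server pending work and broadcast to collaborators.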
1858 match progress {
1859 lsp::WorkDoneProgress::Begin(_) => {
1860 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1861 language_server_status.pending_diagnostic_updates += 1;
1862 if language_server_status.pending_diagnostic_updates == 1 {
1863 self.disk_based_diagnostics_started(cx);
1864 self.broadcast_language_server_update(
1865 server_id,
1866 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1867 proto::LspDiskBasedDiagnosticsUpdating {},
1868 ),
1869 );
1870 }
1871 } else {
1872 self.on_lsp_work_start(server_id, token.clone(), cx);
1873 self.broadcast_language_server_update(
1874 server_id,
1875 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1876 token,
1877 }),
1878 );
1879 }
1880 }
1881 lsp::WorkDoneProgress::Report(report) => {
1882 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1883 self.on_lsp_work_progress(
1884 server_id,
1885 token.clone(),
1886 LanguageServerProgress {
1887 message: report.message.clone(),
1888 percentage: report.percentage.map(|p| p as usize),
1889 last_update_at: Instant::now(),
1890 },
1891 cx,
1892 );
1893 self.broadcast_language_server_update(
1894 server_id,
1895 proto::update_language_server::Variant::WorkProgress(
1896 proto::LspWorkProgress {
1897 token,
1898 message: report.message,
1899 percentage: report.percentage.map(|p| p as u32),
1900 },
1901 ),
1902 );
1903 }
1904 }
1905 lsp::WorkDoneProgress::End(_) => {
1906 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1907 language_server_status.pending_diagnostic_updates -= 1;
1908 if language_server_status.pending_diagnostic_updates == 0 {
1909 self.disk_based_diagnostics_finished(cx);
1910 self.broadcast_language_server_update(
1911 server_id,
1912 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1913 proto::LspDiskBasedDiagnosticsUpdated {},
1914 ),
1915 );
1916 }
1917 } else {
1918 self.on_lsp_work_end(server_id, token.clone(), cx);
1919 self.broadcast_language_server_update(
1920 server_id,
1921 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1922 token,
1923 }),
1924 );
1925 }
1926 }
1927 }
1928 }
1929
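/// Records the start of a unit of language-server work under its progress token
/// and notifies observers; `on_lsp_work_progress` and `on_lsp_work_end` below
/// update and clear the same entry.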
1930 fn on_lsp_work_start(
1931 &mut self,
1932 language_server_id: usize,
1933 token: String,
1934 cx: &mut ModelContext<Self>,
1935 ) {
1936 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1937 status.pending_work.insert(
1938 token,
1939 LanguageServerProgress {
1940 message: None,
1941 percentage: None,
1942 last_update_at: Instant::now(),
1943 },
1944 );
1945 cx.notify();
1946 }
1947 }
1948
1949 fn on_lsp_work_progress(
1950 &mut self,
1951 language_server_id: usize,
1952 token: String,
1953 progress: LanguageServerProgress,
1954 cx: &mut ModelContext<Self>,
1955 ) {
1956 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1957 status.pending_work.insert(token, progress);
1958 cx.notify();
1959 }
1960 }
1961
1962 fn on_lsp_work_end(
1963 &mut self,
1964 language_server_id: usize,
1965 token: String,
1966 cx: &mut ModelContext<Self>,
1967 ) {
1968 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1969 status.pending_work.remove(&token);
1970 cx.notify();
1971 }
1972 }
1973
1974 async fn on_lsp_workspace_edit(
1975 this: WeakModelHandle<Self>,
1976 params: lsp::ApplyWorkspaceEditParams,
1977 server_id: usize,
1978 adapter: Arc<dyn LspAdapter>,
1979 language_server: Arc<LanguageServer>,
1980 mut cx: AsyncAppContext,
1981 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1982 let this = this
1983 .upgrade(&cx)
1984 .ok_or_else(|| anyhow!("project closed"))?;
1985 let transaction = Self::deserialize_workspace_edit(
1986 this.clone(),
1987 params.edit,
1988 true,
1989 adapter.clone(),
1990 language_server.clone(),
1991 &mut cx,
1992 )
1993 .await
1994 .log_err();
1995 this.update(&mut cx, |this, _| {
1996 if let Some(transaction) = transaction {
1997 this.last_workspace_edits_by_language_server
1998 .insert(server_id, transaction);
1999 }
2000 });
2001 Ok(lsp::ApplyWorkspaceEditResponse {
2002 applied: true,
2003 failed_change: None,
2004 failure_reason: None,
2005 })
2006 }
2007
2008 fn broadcast_language_server_update(
2009 &self,
2010 language_server_id: usize,
2011 event: proto::update_language_server::Variant,
2012 ) {
2013 if let Some(project_id) = self.remote_id() {
2014 self.client
2015 .send(proto::UpdateLanguageServer {
2016 project_id,
2017 language_server_id: language_server_id as u64,
2018 variant: Some(event),
2019 })
2020 .log_err();
2021 }
2022 }
2023
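/// Sends a `workspace/didChangeConfiguration` notification with the new
/// settings to every running language server and stores them as the project's
/// current language-server settings.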
2024 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2025 for (_, server) in self.language_servers.values() {
2026 server
2027 .notify::<lsp::notification::DidChangeConfiguration>(
2028 lsp::DidChangeConfigurationParams {
2029 settings: settings.clone(),
2030 },
2031 )
2032 .ok();
2033 }
2034 *self.language_server_settings.lock() = settings;
2035 }
2036
2037 pub fn language_server_statuses(
2038 &self,
2039 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2040 self.language_server_statuses.values()
2041 }
2042
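/// Converts LSP `PublishDiagnosticsParams` into the project's diagnostic
/// entries. Each standalone diagnostic becomes the primary entry of a new
/// group; related-information items in the same file join that group as
/// secondary entries, and severities reported for supporting diagnostics are
/// folded back into the matching group members before the file is updated.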
2043 pub fn update_diagnostics(
2044 &mut self,
2045 params: lsp::PublishDiagnosticsParams,
2046 disk_based_sources: &[&str],
2047 cx: &mut ModelContext<Self>,
2048 ) -> Result<()> {
2049 let abs_path = params
2050 .uri
2051 .to_file_path()
2052 .map_err(|_| anyhow!("URI is not a file"))?;
2053 let mut next_group_id = 0;
2054 let mut diagnostics = Vec::default();
2055 let mut primary_diagnostic_group_ids = HashMap::default();
2056 let mut sources_by_group_id = HashMap::default();
2057 let mut supporting_diagnostics = HashMap::default();
2058 for diagnostic in &params.diagnostics {
2059 let source = diagnostic.source.as_ref();
2060 let code = diagnostic.code.as_ref().map(|code| match code {
2061 lsp::NumberOrString::Number(code) => code.to_string(),
2062 lsp::NumberOrString::String(code) => code.clone(),
2063 });
2064 let range = range_from_lsp(diagnostic.range);
2065 let is_supporting = diagnostic
2066 .related_information
2067 .as_ref()
2068 .map_or(false, |infos| {
2069 infos.iter().any(|info| {
2070 primary_diagnostic_group_ids.contains_key(&(
2071 source,
2072 code.clone(),
2073 range_from_lsp(info.location.range),
2074 ))
2075 })
2076 });
2077
2078 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2079 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2080 });
2081
2082 if is_supporting {
2083 supporting_diagnostics.insert(
2084 (source, code.clone(), range),
2085 (diagnostic.severity, is_unnecessary),
2086 );
2087 } else {
2088 let group_id = post_inc(&mut next_group_id);
2089 let is_disk_based = source.map_or(false, |source| {
2090 disk_based_sources.contains(&source.as_str())
2091 });
2092
2093 sources_by_group_id.insert(group_id, source);
2094 primary_diagnostic_group_ids
2095 .insert((source, code.clone(), range.clone()), group_id);
2096
2097 diagnostics.push(DiagnosticEntry {
2098 range,
2099 diagnostic: Diagnostic {
2100 code: code.clone(),
2101 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2102 message: diagnostic.message.clone(),
2103 group_id,
2104 is_primary: true,
2105 is_valid: true,
2106 is_disk_based,
2107 is_unnecessary,
2108 },
2109 });
2110 if let Some(infos) = &diagnostic.related_information {
2111 for info in infos {
2112 if info.location.uri == params.uri && !info.message.is_empty() {
2113 let range = range_from_lsp(info.location.range);
2114 diagnostics.push(DiagnosticEntry {
2115 range,
2116 diagnostic: Diagnostic {
2117 code: code.clone(),
2118 severity: DiagnosticSeverity::INFORMATION,
2119 message: info.message.clone(),
2120 group_id,
2121 is_primary: false,
2122 is_valid: true,
2123 is_disk_based,
2124 is_unnecessary: false,
2125 },
2126 });
2127 }
2128 }
2129 }
2130 }
2131 }
2132
2133 for entry in &mut diagnostics {
2134 let diagnostic = &mut entry.diagnostic;
2135 if !diagnostic.is_primary {
2136 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2137 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2138 source,
2139 diagnostic.code.clone(),
2140 entry.range.clone(),
2141 )) {
2142 if let Some(severity) = severity {
2143 diagnostic.severity = severity;
2144 }
2145 diagnostic.is_unnecessary = is_unnecessary;
2146 }
2147 }
2148 }
2149
2150 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2151 Ok(())
2152 }
2153
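/// Applies a set of diagnostics to the worktree containing `abs_path`,
/// refreshing any open buffer for that path first and emitting
/// `Event::DiagnosticsUpdated` when the worktree's summaries change. Invisible
/// worktrees are skipped.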
2154 pub fn update_diagnostic_entries(
2155 &mut self,
2156 abs_path: PathBuf,
2157 version: Option<i32>,
2158 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2159 cx: &mut ModelContext<Project>,
2160 ) -> Result<(), anyhow::Error> {
2161 let (worktree, relative_path) = self
2162 .find_local_worktree(&abs_path, cx)
2163 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2164 if !worktree.read(cx).is_visible() {
2165 return Ok(());
2166 }
2167
2168 let project_path = ProjectPath {
2169 worktree_id: worktree.read(cx).id(),
2170 path: relative_path.into(),
2171 };
2172 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2173 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2174 }
2175
2176 let updated = worktree.update(cx, |worktree, cx| {
2177 worktree
2178 .as_local_mut()
2179 .ok_or_else(|| anyhow!("not a local worktree"))?
2180 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2181 })?;
2182 if updated {
2183 cx.emit(Event::DiagnosticsUpdated(project_path));
2184 }
2185 Ok(())
2186 }
2187
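/// Sorts, clips, and stores diagnostics on an open buffer. Ranges of disk-based
/// diagnostics are translated through the edits made since the last save so
/// they line up with the buffer's current contents, and empty ranges are
/// widened to cover at least one character.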
2188 fn update_buffer_diagnostics(
2189 &mut self,
2190 buffer: &ModelHandle<Buffer>,
2191 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2192 version: Option<i32>,
2193 cx: &mut ModelContext<Self>,
2194 ) -> Result<()> {
2195 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2196 Ordering::Equal
2197 .then_with(|| b.is_primary.cmp(&a.is_primary))
2198 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2199 .then_with(|| a.severity.cmp(&b.severity))
2200 .then_with(|| a.message.cmp(&b.message))
2201 }
2202
2203 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2204
2205 diagnostics.sort_unstable_by(|a, b| {
2206 Ordering::Equal
2207 .then_with(|| a.range.start.cmp(&b.range.start))
2208 .then_with(|| b.range.end.cmp(&a.range.end))
2209 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2210 });
2211
2212 let mut sanitized_diagnostics = Vec::new();
2213 let edits_since_save = Patch::new(
2214 snapshot
2215 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2216 .collect(),
2217 );
2218 for entry in diagnostics {
2219 let start;
2220 let end;
2221 if entry.diagnostic.is_disk_based {
2222 // Some diagnostics are based on files on disk instead of buffers'
2223 // current contents. Adjust these diagnostics' ranges to reflect
2224 // any unsaved edits.
2225 start = edits_since_save.old_to_new(entry.range.start);
2226 end = edits_since_save.old_to_new(entry.range.end);
2227 } else {
2228 start = entry.range.start;
2229 end = entry.range.end;
2230 }
2231
2232 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2233 ..snapshot.clip_point_utf16(end, Bias::Right);
2234
2235 // Expand empty ranges by one character
2236 if range.start == range.end {
2237 range.end.column += 1;
2238 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2239 if range.start == range.end && range.end.column > 0 {
2240 range.start.column -= 1;
2241 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2242 }
2243 }
2244
2245 sanitized_diagnostics.push(DiagnosticEntry {
2246 range,
2247 diagnostic: entry.diagnostic,
2248 });
2249 }
2250 drop(edits_since_save);
2251
2252 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2253 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2254 Ok(())
2255 }
2256
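/// Reloads the given buffers from disk, skipping buffers that aren't dirty.
/// Remote buffers are reloaded by the host over RPC; local buffers are reloaded
/// directly, and all resulting transactions are gathered into a single
/// `ProjectTransaction`.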
2257 pub fn reload_buffers(
2258 &self,
2259 buffers: HashSet<ModelHandle<Buffer>>,
2260 push_to_history: bool,
2261 cx: &mut ModelContext<Self>,
2262 ) -> Task<Result<ProjectTransaction>> {
2263 let mut local_buffers = Vec::new();
2264 let mut remote_buffers = None;
2265 for buffer_handle in buffers {
2266 let buffer = buffer_handle.read(cx);
2267 if buffer.is_dirty() {
2268 if let Some(file) = File::from_dyn(buffer.file()) {
2269 if file.is_local() {
2270 local_buffers.push(buffer_handle);
2271 } else {
2272 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2273 }
2274 }
2275 }
2276 }
2277
2278 let remote_buffers = self.remote_id().zip(remote_buffers);
2279 let client = self.client.clone();
2280
2281 cx.spawn(|this, mut cx| async move {
2282 let mut project_transaction = ProjectTransaction::default();
2283
2284 if let Some((project_id, remote_buffers)) = remote_buffers {
2285 let response = client
2286 .request(proto::ReloadBuffers {
2287 project_id,
2288 buffer_ids: remote_buffers
2289 .iter()
2290 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2291 .collect(),
2292 })
2293 .await?
2294 .transaction
2295 .ok_or_else(|| anyhow!("missing transaction"))?;
2296 project_transaction = this
2297 .update(&mut cx, |this, cx| {
2298 this.deserialize_project_transaction(response, push_to_history, cx)
2299 })
2300 .await?;
2301 }
2302
2303 for buffer in local_buffers {
2304 let transaction = buffer
2305 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2306 .await?;
2307 buffer.update(&mut cx, |buffer, cx| {
2308 if let Some(transaction) = transaction {
2309 if !push_to_history {
2310 buffer.forget_transaction(transaction.id);
2311 }
2312 project_transaction.0.insert(cx.handle(), transaction);
2313 }
2314 });
2315 }
2316
2317 Ok(project_transaction)
2318 })
2319 }
2320
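/// Formats the given buffers. Remote buffers are formatted by the host over
/// RPC. Local buffers use their language server's document-formatting request,
/// falling back to range formatting over the whole buffer, and the returned
/// edits are applied inside a single transaction per buffer.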
2321 pub fn format(
2322 &self,
2323 buffers: HashSet<ModelHandle<Buffer>>,
2324 push_to_history: bool,
2325 cx: &mut ModelContext<Project>,
2326 ) -> Task<Result<ProjectTransaction>> {
2327 let mut local_buffers = Vec::new();
2328 let mut remote_buffers = None;
2329 for buffer_handle in buffers {
2330 let buffer = buffer_handle.read(cx);
2331 if let Some(file) = File::from_dyn(buffer.file()) {
2332 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2333 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2334 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2335 }
2336 } else {
2337 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2338 }
2339 } else {
2340 return Task::ready(Ok(Default::default()));
2341 }
2342 }
2343
2344 let remote_buffers = self.remote_id().zip(remote_buffers);
2345 let client = self.client.clone();
2346
2347 cx.spawn(|this, mut cx| async move {
2348 let mut project_transaction = ProjectTransaction::default();
2349
2350 if let Some((project_id, remote_buffers)) = remote_buffers {
2351 let response = client
2352 .request(proto::FormatBuffers {
2353 project_id,
2354 buffer_ids: remote_buffers
2355 .iter()
2356 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2357 .collect(),
2358 })
2359 .await?
2360 .transaction
2361 .ok_or_else(|| anyhow!("missing transaction"))?;
2362 project_transaction = this
2363 .update(&mut cx, |this, cx| {
2364 this.deserialize_project_transaction(response, push_to_history, cx)
2365 })
2366 .await?;
2367 }
2368
2369 for (buffer, buffer_abs_path, language_server) in local_buffers {
2370 let text_document = lsp::TextDocumentIdentifier::new(
2371 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2372 );
2373 let capabilities = &language_server.capabilities();
2374 let tab_size = cx.update(|cx| {
2375 let language_name = buffer.read(cx).language().map(|language| language.name());
2376 cx.global::<Settings>().tab_size(language_name.as_deref())
2377 });
2378 let lsp_edits = if capabilities
2379 .document_formatting_provider
2380 .as_ref()
2381 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2382 {
2383 language_server
2384 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2385 text_document,
2386 options: lsp::FormattingOptions {
2387 tab_size,
2388 insert_spaces: true,
2389 insert_final_newline: Some(true),
2390 ..Default::default()
2391 },
2392 work_done_progress_params: Default::default(),
2393 })
2394 .await?
2395 } else if capabilities
2396 .document_range_formatting_provider
2397 .as_ref()
2398 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2399 {
2400 let buffer_start = lsp::Position::new(0, 0);
2401 let buffer_end =
2402 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2403 language_server
2404 .request::<lsp::request::RangeFormatting>(
2405 lsp::DocumentRangeFormattingParams {
2406 text_document,
2407 range: lsp::Range::new(buffer_start, buffer_end),
2408 options: lsp::FormattingOptions {
2409 tab_size,
2410 insert_spaces: true,
2411 insert_final_newline: Some(true),
2412 ..Default::default()
2413 },
2414 work_done_progress_params: Default::default(),
2415 },
2416 )
2417 .await?
2418 } else {
2419 continue;
2420 };
2421
2422 if let Some(lsp_edits) = lsp_edits {
2423 let edits = this
2424 .update(&mut cx, |this, cx| {
2425 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2426 })
2427 .await?;
2428 buffer.update(&mut cx, |buffer, cx| {
2429 buffer.finalize_last_transaction();
2430 buffer.start_transaction();
2431 for (range, text) in edits {
2432 buffer.edit([(range, text)], cx);
2433 }
2434 if buffer.end_transaction(cx).is_some() {
2435 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2436 if !push_to_history {
2437 buffer.forget_transaction(transaction.id);
2438 }
2439 project_transaction.0.insert(cx.handle(), transaction);
2440 }
2441 });
2442 }
2443 }
2444
2445 Ok(project_transaction)
2446 })
2447 }
2448
2449 pub fn definition<T: ToPointUtf16>(
2450 &self,
2451 buffer: &ModelHandle<Buffer>,
2452 position: T,
2453 cx: &mut ModelContext<Self>,
2454 ) -> Task<Result<Vec<Location>>> {
2455 let position = position.to_point_utf16(buffer.read(cx));
2456 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2457 }
2458
2459 pub fn references<T: ToPointUtf16>(
2460 &self,
2461 buffer: &ModelHandle<Buffer>,
2462 position: T,
2463 cx: &mut ModelContext<Self>,
2464 ) -> Task<Result<Vec<Location>>> {
2465 let position = position.to_point_utf16(buffer.read(cx));
2466 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2467 }
2468
2469 pub fn document_highlights<T: ToPointUtf16>(
2470 &self,
2471 buffer: &ModelHandle<Buffer>,
2472 position: T,
2473 cx: &mut ModelContext<Self>,
2474 ) -> Task<Result<Vec<DocumentHighlight>>> {
2475 let position = position.to_point_utf16(buffer.read(cx));
2476
2477 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2478 }
2479
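/// Fetches workspace symbols matching `query` from every running language
/// server (or from the host over RPC for remote projects), resolving each
/// result to a worktree-relative path and a display label.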
2480 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2481 if self.is_local() {
2482 let mut requests = Vec::new();
2483 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2484 let worktree_id = *worktree_id;
2485 if let Some(worktree) = self
2486 .worktree_for_id(worktree_id, cx)
2487 .and_then(|worktree| worktree.read(cx).as_local())
2488 {
2489 let lsp_adapter = lsp_adapter.clone();
2490 let worktree_abs_path = worktree.abs_path().clone();
2491 requests.push(
2492 language_server
2493 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2494 query: query.to_string(),
2495 ..Default::default()
2496 })
2497 .log_err()
2498 .map(move |response| {
2499 (
2500 lsp_adapter,
2501 worktree_id,
2502 worktree_abs_path,
2503 response.unwrap_or_default(),
2504 )
2505 }),
2506 );
2507 }
2508 }
2509
2510 cx.spawn_weak(|this, cx| async move {
2511 let responses = futures::future::join_all(requests).await;
2512 let this = if let Some(this) = this.upgrade(&cx) {
2513 this
2514 } else {
2515 return Ok(Default::default());
2516 };
2517 this.read_with(&cx, |this, cx| {
2518 let mut symbols = Vec::new();
2519 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2520 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2521 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2522 let mut worktree_id = source_worktree_id;
2523 let path;
2524 if let Some((worktree, rel_path)) =
2525 this.find_local_worktree(&abs_path, cx)
2526 {
2527 worktree_id = worktree.read(cx).id();
2528 path = rel_path;
2529 } else {
2530 path = relativize_path(&worktree_abs_path, &abs_path);
2531 }
2532
2533 let label = this
2534 .languages
2535 .select_language(&path)
2536 .and_then(|language| {
2537 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2538 })
2539 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2540 let signature = this.symbol_signature(worktree_id, &path);
2541
2542 Some(Symbol {
2543 source_worktree_id,
2544 worktree_id,
2545 language_server_name: adapter.name(),
2546 name: lsp_symbol.name,
2547 kind: lsp_symbol.kind,
2548 label,
2549 path,
2550 range: range_from_lsp(lsp_symbol.location.range),
2551 signature,
2552 })
2553 }));
2554 }
2555 Ok(symbols)
2556 })
2557 })
2558 } else if let Some(project_id) = self.remote_id() {
2559 let request = self.client.request(proto::GetProjectSymbols {
2560 project_id,
2561 query: query.to_string(),
2562 });
2563 cx.spawn_weak(|this, cx| async move {
2564 let response = request.await?;
2565 let mut symbols = Vec::new();
2566 if let Some(this) = this.upgrade(&cx) {
2567 this.read_with(&cx, |this, _| {
2568 symbols.extend(
2569 response
2570 .symbols
2571 .into_iter()
2572 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2573 );
2574 })
2575 }
2576 Ok(symbols)
2577 })
2578 } else {
2579 Task::ready(Ok(Default::default()))
2580 }
2581 }
2582
2583 pub fn open_buffer_for_symbol(
2584 &mut self,
2585 symbol: &Symbol,
2586 cx: &mut ModelContext<Self>,
2587 ) -> Task<Result<ModelHandle<Buffer>>> {
2588 if self.is_local() {
2589 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2590 symbol.source_worktree_id,
2591 symbol.language_server_name.clone(),
2592 )) {
2593 server.clone()
2594 } else {
2595 return Task::ready(Err(anyhow!(
2596 "language server for worktree and language not found"
2597 )));
2598 };
2599
2600 let worktree_abs_path = if let Some(worktree_abs_path) = self
2601 .worktree_for_id(symbol.worktree_id, cx)
2602 .and_then(|worktree| worktree.read(cx).as_local())
2603 .map(|local_worktree| local_worktree.abs_path())
2604 {
2605 worktree_abs_path
2606 } else {
2607 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2608 };
2609 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2610 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2611 uri
2612 } else {
2613 return Task::ready(Err(anyhow!("invalid symbol path")));
2614 };
2615
2616 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2617 } else if let Some(project_id) = self.remote_id() {
2618 let request = self.client.request(proto::OpenBufferForSymbol {
2619 project_id,
2620 symbol: Some(serialize_symbol(symbol)),
2621 });
2622 cx.spawn(|this, mut cx| async move {
2623 let response = request.await?;
2624 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2625 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2626 .await
2627 })
2628 } else {
2629 Task::ready(Err(anyhow!("project does not have a remote id")))
2630 }
2631 }
2632
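/// Requests completions at `position`. For local buffers this sends an LSP
/// `textDocument/completion` request and converts each item's edit range into
/// anchors, falling back to the word under the cursor when the server supplies
/// no range; insert-and-replace edits are skipped. Remote projects proxy the
/// request to the host.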
2633 pub fn completions<T: ToPointUtf16>(
2634 &self,
2635 source_buffer_handle: &ModelHandle<Buffer>,
2636 position: T,
2637 cx: &mut ModelContext<Self>,
2638 ) -> Task<Result<Vec<Completion>>> {
2639 let source_buffer_handle = source_buffer_handle.clone();
2640 let source_buffer = source_buffer_handle.read(cx);
2641 let buffer_id = source_buffer.remote_id();
2642 let language = source_buffer.language().cloned();
2643 let worktree;
2644 let buffer_abs_path;
2645 if let Some(file) = File::from_dyn(source_buffer.file()) {
2646 worktree = file.worktree.clone();
2647 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2648 } else {
2649 return Task::ready(Ok(Default::default()));
2650 };
2651
2652 let position = position.to_point_utf16(source_buffer);
2653 let anchor = source_buffer.anchor_after(position);
2654
2655 if worktree.read(cx).as_local().is_some() {
2656 let buffer_abs_path = buffer_abs_path.unwrap();
2657 let (_, lang_server) =
2658 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2659 server.clone()
2660 } else {
2661 return Task::ready(Ok(Default::default()));
2662 };
2663
2664 cx.spawn(|_, cx| async move {
2665 let completions = lang_server
2666 .request::<lsp::request::Completion>(lsp::CompletionParams {
2667 text_document_position: lsp::TextDocumentPositionParams::new(
2668 lsp::TextDocumentIdentifier::new(
2669 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2670 ),
2671 point_to_lsp(position),
2672 ),
2673 context: Default::default(),
2674 work_done_progress_params: Default::default(),
2675 partial_result_params: Default::default(),
2676 })
2677 .await
2678 .context("lsp completion request failed")?;
2679
2680 let completions = if let Some(completions) = completions {
2681 match completions {
2682 lsp::CompletionResponse::Array(completions) => completions,
2683 lsp::CompletionResponse::List(list) => list.items,
2684 }
2685 } else {
2686 Default::default()
2687 };
2688
2689 source_buffer_handle.read_with(&cx, |this, _| {
2690 let snapshot = this.snapshot();
2691 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2692 let mut range_for_token = None;
2693 Ok(completions
2694 .into_iter()
2695 .filter_map(|lsp_completion| {
2696 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2697 // If the language server provides a range to overwrite, then
2698 // check that the range is valid.
2699 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2700 let range = range_from_lsp(edit.range);
2701 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2702 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2703 if start != range.start || end != range.end {
2704 log::info!("completion out of expected range");
2705 return None;
2706 }
2707 (
2708 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2709 edit.new_text.clone(),
2710 )
2711 }
2712 // If the language server does not provide a range, then infer
2713 // the range based on the syntax tree.
2714 None => {
2715 if position != clipped_position {
2716 log::info!("completion out of expected range");
2717 return None;
2718 }
2719 let Range { start, end } = range_for_token
2720 .get_or_insert_with(|| {
2721 let offset = position.to_offset(&snapshot);
2722 snapshot
2723 .range_for_word_token_at(offset)
2724 .unwrap_or_else(|| offset..offset)
2725 })
2726 .clone();
2727 let text = lsp_completion
2728 .insert_text
2729 .as_ref()
2730 .unwrap_or(&lsp_completion.label)
2731 .clone();
2732 (
2733 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2734 text,
2735 )
2736 }
2737 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2738 log::info!("unsupported insert/replace completion");
2739 return None;
2740 }
2741 };
2742
2743 Some(Completion {
2744 old_range,
2745 new_text,
2746 label: language
2747 .as_ref()
2748 .and_then(|l| l.label_for_completion(&lsp_completion))
2749 .unwrap_or_else(|| {
2750 CodeLabel::plain(
2751 lsp_completion.label.clone(),
2752 lsp_completion.filter_text.as_deref(),
2753 )
2754 }),
2755 lsp_completion,
2756 })
2757 })
2758 .collect())
2759 })
2760 })
2761 } else if let Some(project_id) = self.remote_id() {
2762 let rpc = self.client.clone();
2763 let message = proto::GetCompletions {
2764 project_id,
2765 buffer_id,
2766 position: Some(language::proto::serialize_anchor(&anchor)),
2767 version: serialize_version(&source_buffer.version()),
2768 };
2769 cx.spawn_weak(|_, mut cx| async move {
2770 let response = rpc.request(message).await?;
2771
2772 source_buffer_handle
2773 .update(&mut cx, |buffer, _| {
2774 buffer.wait_for_version(deserialize_version(response.version))
2775 })
2776 .await;
2777
2778 response
2779 .completions
2780 .into_iter()
2781 .map(|completion| {
2782 language::proto::deserialize_completion(completion, language.as_ref())
2783 })
2784 .collect()
2785 })
2786 } else {
2787 Task::ready(Ok(Default::default()))
2788 }
2789 }
2790
2791 pub fn apply_additional_edits_for_completion(
2792 &self,
2793 buffer_handle: ModelHandle<Buffer>,
2794 completion: Completion,
2795 push_to_history: bool,
2796 cx: &mut ModelContext<Self>,
2797 ) -> Task<Result<Option<Transaction>>> {
2798 let buffer = buffer_handle.read(cx);
2799 let buffer_id = buffer.remote_id();
2800
2801 if self.is_local() {
2802 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2803 {
2804 server.clone()
2805 } else {
2806 return Task::ready(Ok(Default::default()));
2807 };
2808
2809 cx.spawn(|this, mut cx| async move {
2810 let resolved_completion = lang_server
2811 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2812 .await?;
2813 if let Some(edits) = resolved_completion.additional_text_edits {
2814 let edits = this
2815 .update(&mut cx, |this, cx| {
2816 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2817 })
2818 .await?;
2819 buffer_handle.update(&mut cx, |buffer, cx| {
2820 buffer.finalize_last_transaction();
2821 buffer.start_transaction();
2822 for (range, text) in edits {
2823 buffer.edit([(range, text)], cx);
2824 }
2825 let transaction = if buffer.end_transaction(cx).is_some() {
2826 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2827 if !push_to_history {
2828 buffer.forget_transaction(transaction.id);
2829 }
2830 Some(transaction)
2831 } else {
2832 None
2833 };
2834 Ok(transaction)
2835 })
2836 } else {
2837 Ok(None)
2838 }
2839 })
2840 } else if let Some(project_id) = self.remote_id() {
2841 let client = self.client.clone();
2842 cx.spawn(|_, mut cx| async move {
2843 let response = client
2844 .request(proto::ApplyCompletionAdditionalEdits {
2845 project_id,
2846 buffer_id,
2847 completion: Some(language::proto::serialize_completion(&completion)),
2848 })
2849 .await?;
2850
2851 if let Some(transaction) = response.transaction {
2852 let transaction = language::proto::deserialize_transaction(transaction)?;
2853 buffer_handle
2854 .update(&mut cx, |buffer, _| {
2855 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2856 })
2857 .await;
2858 if push_to_history {
2859 buffer_handle.update(&mut cx, |buffer, _| {
2860 buffer.push_transaction(transaction.clone(), Instant::now());
2861 });
2862 }
2863 Ok(Some(transaction))
2864 } else {
2865 Ok(None)
2866 }
2867 })
2868 } else {
2869 Task::ready(Err(anyhow!("project does not have a remote id")))
2870 }
2871 }
2872
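/// Fetches code actions for `range`, passing the diagnostics that overlap it as
/// context and restricting the request to quick-fix, refactor, refactor-extract,
/// and source kinds. Remote projects proxy the request to the host.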
2873 pub fn code_actions<T: Clone + ToOffset>(
2874 &self,
2875 buffer_handle: &ModelHandle<Buffer>,
2876 range: Range<T>,
2877 cx: &mut ModelContext<Self>,
2878 ) -> Task<Result<Vec<CodeAction>>> {
2879 let buffer_handle = buffer_handle.clone();
2880 let buffer = buffer_handle.read(cx);
2881 let snapshot = buffer.snapshot();
2882 let relevant_diagnostics = snapshot
2883 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2884 .map(|entry| entry.to_lsp_diagnostic_stub())
2885 .collect();
2886 let buffer_id = buffer.remote_id();
2887 let worktree;
2888 let buffer_abs_path;
2889 if let Some(file) = File::from_dyn(buffer.file()) {
2890 worktree = file.worktree.clone();
2891 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2892 } else {
2893 return Task::ready(Ok(Default::default()));
2894 };
2895 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2896
2897 if worktree.read(cx).as_local().is_some() {
2898 let buffer_abs_path = buffer_abs_path.unwrap();
2899 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2900 {
2901 server.clone()
2902 } else {
2903 return Task::ready(Ok(Default::default()));
2904 };
2905
2906 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2907 cx.foreground().spawn(async move {
2908 if lang_server.capabilities().code_action_provider.is_none() {
2909 return Ok(Default::default());
2910 }
2911
2912 Ok(lang_server
2913 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2914 text_document: lsp::TextDocumentIdentifier::new(
2915 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2916 ),
2917 range: lsp_range,
2918 work_done_progress_params: Default::default(),
2919 partial_result_params: Default::default(),
2920 context: lsp::CodeActionContext {
2921 diagnostics: relevant_diagnostics,
2922 only: Some(vec![
2923 lsp::CodeActionKind::QUICKFIX,
2924 lsp::CodeActionKind::REFACTOR,
2925 lsp::CodeActionKind::REFACTOR_EXTRACT,
2926 lsp::CodeActionKind::SOURCE,
2927 ]),
2928 },
2929 })
2930 .await?
2931 .unwrap_or_default()
2932 .into_iter()
2933 .filter_map(|entry| {
2934 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2935 Some(CodeAction {
2936 range: range.clone(),
2937 lsp_action,
2938 })
2939 } else {
2940 None
2941 }
2942 })
2943 .collect())
2944 })
2945 } else if let Some(project_id) = self.remote_id() {
2946 let rpc = self.client.clone();
2947 let version = buffer.version();
2948 cx.spawn_weak(|_, mut cx| async move {
2949 let response = rpc
2950 .request(proto::GetCodeActions {
2951 project_id,
2952 buffer_id,
2953 start: Some(language::proto::serialize_anchor(&range.start)),
2954 end: Some(language::proto::serialize_anchor(&range.end)),
2955 version: serialize_version(&version),
2956 })
2957 .await?;
2958
2959 buffer_handle
2960 .update(&mut cx, |buffer, _| {
2961 buffer.wait_for_version(deserialize_version(response.version))
2962 })
2963 .await;
2964
2965 response
2966 .actions
2967 .into_iter()
2968 .map(language::proto::deserialize_code_action)
2969 .collect()
2970 })
2971 } else {
2972 Task::ready(Ok(Default::default()))
2973 }
2974 }
2975
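/// Applies a code action. The action is first re-resolved (or re-requested and
/// matched by title) to obtain an up-to-date edit, then either its workspace
/// edit is applied or its command is executed, in which case any workspace
/// edits the server sent while the command ran are returned.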
2976 pub fn apply_code_action(
2977 &self,
2978 buffer_handle: ModelHandle<Buffer>,
2979 mut action: CodeAction,
2980 push_to_history: bool,
2981 cx: &mut ModelContext<Self>,
2982 ) -> Task<Result<ProjectTransaction>> {
2983 if self.is_local() {
2984 let buffer = buffer_handle.read(cx);
2985 let (lsp_adapter, lang_server) =
2986 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2987 server.clone()
2988 } else {
2989 return Task::ready(Ok(Default::default()));
2990 };
2991 let range = action.range.to_point_utf16(buffer);
2992
2993 cx.spawn(|this, mut cx| async move {
2994 if let Some(lsp_range) = action
2995 .lsp_action
2996 .data
2997 .as_mut()
2998 .and_then(|d| d.get_mut("codeActionParams"))
2999 .and_then(|d| d.get_mut("range"))
3000 {
3001 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3002 action.lsp_action = lang_server
3003 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3004 .await?;
3005 } else {
3006 let actions = this
3007 .update(&mut cx, |this, cx| {
3008 this.code_actions(&buffer_handle, action.range, cx)
3009 })
3010 .await?;
3011 action.lsp_action = actions
3012 .into_iter()
3013 .find(|a| a.lsp_action.title == action.lsp_action.title)
3014 .ok_or_else(|| anyhow!("code action is outdated"))?
3015 .lsp_action;
3016 }
3017
3018 if let Some(edit) = action.lsp_action.edit {
3019 Self::deserialize_workspace_edit(
3020 this,
3021 edit,
3022 push_to_history,
3023 lsp_adapter,
3024 lang_server,
3025 &mut cx,
3026 )
3027 .await
3028 } else if let Some(command) = action.lsp_action.command {
3029 this.update(&mut cx, |this, _| {
3030 this.last_workspace_edits_by_language_server
3031 .remove(&lang_server.server_id());
3032 });
3033 lang_server
3034 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3035 command: command.command,
3036 arguments: command.arguments.unwrap_or_default(),
3037 ..Default::default()
3038 })
3039 .await?;
3040 Ok(this.update(&mut cx, |this, _| {
3041 this.last_workspace_edits_by_language_server
3042 .remove(&lang_server.server_id())
3043 .unwrap_or_default()
3044 }))
3045 } else {
3046 Ok(ProjectTransaction::default())
3047 }
3048 })
3049 } else if let Some(project_id) = self.remote_id() {
3050 let client = self.client.clone();
3051 let request = proto::ApplyCodeAction {
3052 project_id,
3053 buffer_id: buffer_handle.read(cx).remote_id(),
3054 action: Some(language::proto::serialize_code_action(&action)),
3055 };
3056 cx.spawn(|this, mut cx| async move {
3057 let response = client
3058 .request(request)
3059 .await?
3060 .transaction
3061 .ok_or_else(|| anyhow!("missing transaction"))?;
3062 this.update(&mut cx, |this, cx| {
3063 this.deserialize_project_transaction(response, push_to_history, cx)
3064 })
3065 .await
3066 })
3067 } else {
3068 Task::ready(Err(anyhow!("project does not have a remote id")))
3069 }
3070 }
3071
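/// Applies an LSP `WorkspaceEdit` to the project. Resource operations
/// (create/rename/delete) go through the project's `Fs`; text edits are applied
/// to buffers opened via the language server, yielding one transaction per
/// edited buffer in the returned `ProjectTransaction`.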
3072 async fn deserialize_workspace_edit(
3073 this: ModelHandle<Self>,
3074 edit: lsp::WorkspaceEdit,
3075 push_to_history: bool,
3076 lsp_adapter: Arc<dyn LspAdapter>,
3077 language_server: Arc<LanguageServer>,
3078 cx: &mut AsyncAppContext,
3079 ) -> Result<ProjectTransaction> {
3080 let fs = this.read_with(cx, |this, _| this.fs.clone());
3081 let mut operations = Vec::new();
3082 if let Some(document_changes) = edit.document_changes {
3083 match document_changes {
3084 lsp::DocumentChanges::Edits(edits) => {
3085 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3086 }
3087 lsp::DocumentChanges::Operations(ops) => operations = ops,
3088 }
3089 } else if let Some(changes) = edit.changes {
3090 operations.extend(changes.into_iter().map(|(uri, edits)| {
3091 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3092 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3093 uri,
3094 version: None,
3095 },
3096 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3097 })
3098 }));
3099 }
3100
3101 let mut project_transaction = ProjectTransaction::default();
3102 for operation in operations {
3103 match operation {
3104 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3105 let abs_path = op
3106 .uri
3107 .to_file_path()
3108 .map_err(|_| anyhow!("can't convert URI to path"))?;
3109
3110 if let Some(parent_path) = abs_path.parent() {
3111 fs.create_dir(parent_path).await?;
3112 }
3113 if abs_path.ends_with("/") {
3114 fs.create_dir(&abs_path).await?;
3115 } else {
3116 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3117 .await?;
3118 }
3119 }
3120 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3121 let source_abs_path = op
3122 .old_uri
3123 .to_file_path()
3124 .map_err(|_| anyhow!("can't convert URI to path"))?;
3125 let target_abs_path = op
3126 .new_uri
3127 .to_file_path()
3128 .map_err(|_| anyhow!("can't convert URI to path"))?;
3129 fs.rename(
3130 &source_abs_path,
3131 &target_abs_path,
3132 op.options.map(Into::into).unwrap_or_default(),
3133 )
3134 .await?;
3135 }
3136 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3137 let abs_path = op
3138 .uri
3139 .to_file_path()
3140 .map_err(|_| anyhow!("can't convert URI to path"))?;
3141 let options = op.options.map(Into::into).unwrap_or_default();
3142 if abs_path.ends_with("/") {
3143 fs.remove_dir(&abs_path, options).await?;
3144 } else {
3145 fs.remove_file(&abs_path, options).await?;
3146 }
3147 }
3148 lsp::DocumentChangeOperation::Edit(op) => {
3149 let buffer_to_edit = this
3150 .update(cx, |this, cx| {
3151 this.open_local_buffer_via_lsp(
3152 op.text_document.uri,
3153 lsp_adapter.clone(),
3154 language_server.clone(),
3155 cx,
3156 )
3157 })
3158 .await?;
3159
3160 let edits = this
3161 .update(cx, |this, cx| {
3162 let edits = op.edits.into_iter().map(|edit| match edit {
3163 lsp::OneOf::Left(edit) => edit,
3164 lsp::OneOf::Right(edit) => edit.text_edit,
3165 });
3166 this.edits_from_lsp(
3167 &buffer_to_edit,
3168 edits,
3169 op.text_document.version,
3170 cx,
3171 )
3172 })
3173 .await?;
3174
3175 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3176 buffer.finalize_last_transaction();
3177 buffer.start_transaction();
3178 for (range, text) in edits {
3179 buffer.edit([(range, text)], cx);
3180 }
3181 let transaction = if buffer.end_transaction(cx).is_some() {
3182 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3183 if !push_to_history {
3184 buffer.forget_transaction(transaction.id);
3185 }
3186 Some(transaction)
3187 } else {
3188 None
3189 };
3190
3191 transaction
3192 });
3193 if let Some(transaction) = transaction {
3194 project_transaction.0.insert(buffer_to_edit, transaction);
3195 }
3196 }
3197 }
3198 }
3199
3200 Ok(project_transaction)
3201 }
3202
3203 pub fn prepare_rename<T: ToPointUtf16>(
3204 &self,
3205 buffer: ModelHandle<Buffer>,
3206 position: T,
3207 cx: &mut ModelContext<Self>,
3208 ) -> Task<Result<Option<Range<Anchor>>>> {
3209 let position = position.to_point_utf16(buffer.read(cx));
3210 self.request_lsp(buffer, PrepareRename { position }, cx)
3211 }
3212
3213 pub fn perform_rename<T: ToPointUtf16>(
3214 &self,
3215 buffer: ModelHandle<Buffer>,
3216 position: T,
3217 new_name: String,
3218 push_to_history: bool,
3219 cx: &mut ModelContext<Self>,
3220 ) -> Task<Result<ProjectTransaction>> {
3221 let position = position.to_point_utf16(buffer.read(cx));
3222 self.request_lsp(
3223 buffer,
3224 PerformRename {
3225 position,
3226 new_name,
3227 push_to_history,
3228 },
3229 cx,
3230 )
3231 }
3232
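/// Searches the project for `query`. Locally, background workers scan visible
/// files to find candidate paths, candidate buffers (plus those already open)
/// are searched on worker tasks, and matches are returned as anchor ranges per
/// buffer. Remote projects forward the query to the host.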
3233 pub fn search(
3234 &self,
3235 query: SearchQuery,
3236 cx: &mut ModelContext<Self>,
3237 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3238 if self.is_local() {
3239 let snapshots = self
3240 .visible_worktrees(cx)
3241 .filter_map(|tree| {
3242 let tree = tree.read(cx).as_local()?;
3243 Some(tree.snapshot())
3244 })
3245 .collect::<Vec<_>>();
3246
3247 let background = cx.background().clone();
3248 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3249 if path_count == 0 {
3250 return Task::ready(Ok(Default::default()));
3251 }
3252 let workers = background.num_cpus().min(path_count);
3253 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3254 cx.background()
3255 .spawn({
3256 let fs = self.fs.clone();
3257 let background = cx.background().clone();
3258 let query = query.clone();
3259 async move {
3260 let fs = &fs;
3261 let query = &query;
3262 let matching_paths_tx = &matching_paths_tx;
3263 let paths_per_worker = (path_count + workers - 1) / workers;
3264 let snapshots = &snapshots;
3265 background
3266 .scoped(|scope| {
3267 for worker_ix in 0..workers {
3268 let worker_start_ix = worker_ix * paths_per_worker;
3269 let worker_end_ix = worker_start_ix + paths_per_worker;
3270 scope.spawn(async move {
3271 let mut snapshot_start_ix = 0;
3272 let mut abs_path = PathBuf::new();
3273 for snapshot in snapshots {
3274 let snapshot_end_ix =
3275 snapshot_start_ix + snapshot.visible_file_count();
3276 if worker_end_ix <= snapshot_start_ix {
3277 break;
3278 } else if worker_start_ix > snapshot_end_ix {
3279 snapshot_start_ix = snapshot_end_ix;
3280 continue;
3281 } else {
3282 let start_in_snapshot = worker_start_ix
3283 .saturating_sub(snapshot_start_ix);
3284 let end_in_snapshot =
3285 cmp::min(worker_end_ix, snapshot_end_ix)
3286 - snapshot_start_ix;
3287
3288 for entry in snapshot
3289 .files(false, start_in_snapshot)
3290 .take(end_in_snapshot - start_in_snapshot)
3291 {
3292 if matching_paths_tx.is_closed() {
3293 break;
3294 }
3295
3296 abs_path.clear();
3297 abs_path.push(&snapshot.abs_path());
3298 abs_path.push(&entry.path);
3299 let matches = if let Some(file) =
3300 fs.open_sync(&abs_path).await.log_err()
3301 {
3302 query.detect(file).unwrap_or(false)
3303 } else {
3304 false
3305 };
3306
3307 if matches {
3308 let project_path =
3309 (snapshot.id(), entry.path.clone());
3310 if matching_paths_tx
3311 .send(project_path)
3312 .await
3313 .is_err()
3314 {
3315 break;
3316 }
3317 }
3318 }
3319
3320 snapshot_start_ix = snapshot_end_ix;
3321 }
3322 }
3323 });
3324 }
3325 })
3326 .await;
3327 }
3328 })
3329 .detach();
3330
3331 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3332 let open_buffers = self
3333 .opened_buffers
3334 .values()
3335 .filter_map(|b| b.upgrade(cx))
3336 .collect::<HashSet<_>>();
3337 cx.spawn(|this, cx| async move {
3338 for buffer in &open_buffers {
3339 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3340 buffers_tx.send((buffer.clone(), snapshot)).await?;
3341 }
3342
3343 let open_buffers = Rc::new(RefCell::new(open_buffers));
3344 while let Some(project_path) = matching_paths_rx.next().await {
3345 if buffers_tx.is_closed() {
3346 break;
3347 }
3348
3349 let this = this.clone();
3350 let open_buffers = open_buffers.clone();
3351 let buffers_tx = buffers_tx.clone();
3352 cx.spawn(|mut cx| async move {
3353 if let Some(buffer) = this
3354 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3355 .await
3356 .log_err()
3357 {
3358 if open_buffers.borrow_mut().insert(buffer.clone()) {
3359 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3360 buffers_tx.send((buffer, snapshot)).await?;
3361 }
3362 }
3363
3364 Ok::<_, anyhow::Error>(())
3365 })
3366 .detach();
3367 }
3368
3369 Ok::<_, anyhow::Error>(())
3370 })
3371 .detach_and_log_err(cx);
3372
3373 let background = cx.background().clone();
3374 cx.background().spawn(async move {
3375 let query = &query;
3376 let mut matched_buffers = Vec::new();
3377 for _ in 0..workers {
3378 matched_buffers.push(HashMap::default());
3379 }
3380 background
3381 .scoped(|scope| {
3382 for worker_matched_buffers in matched_buffers.iter_mut() {
3383 let mut buffers_rx = buffers_rx.clone();
3384 scope.spawn(async move {
3385 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3386 let buffer_matches = query
3387 .search(snapshot.as_rope())
3388 .await
3389 .iter()
3390 .map(|range| {
3391 snapshot.anchor_before(range.start)
3392 ..snapshot.anchor_after(range.end)
3393 })
3394 .collect::<Vec<_>>();
3395 if !buffer_matches.is_empty() {
3396 worker_matched_buffers
3397 .insert(buffer.clone(), buffer_matches);
3398 }
3399 }
3400 });
3401 }
3402 })
3403 .await;
3404 Ok(matched_buffers.into_iter().flatten().collect())
3405 })
3406 } else if let Some(project_id) = self.remote_id() {
3407 let request = self.client.request(query.to_proto(project_id));
3408 cx.spawn(|this, mut cx| async move {
3409 let response = request.await?;
3410 let mut result = HashMap::default();
3411 for location in response.locations {
3412 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3413 let target_buffer = this
3414 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3415 .await?;
3416 let start = location
3417 .start
3418 .and_then(deserialize_anchor)
3419 .ok_or_else(|| anyhow!("missing target start"))?;
3420 let end = location
3421 .end
3422 .and_then(deserialize_anchor)
3423 .ok_or_else(|| anyhow!("missing target end"))?;
3424 result
3425 .entry(target_buffer)
3426 .or_insert(Vec::new())
3427 .push(start..end)
3428 }
3429 Ok(result)
3430 })
3431 } else {
3432 Task::ready(Ok(Default::default()))
3433 }
3434 }
3435
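/// Dispatches an `LspCommand` to the buffer's local language server, or to the
/// host over RPC for remote projects. Returns a default response when no server
/// is available or the server lacks the required capability.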
3436 fn request_lsp<R: LspCommand>(
3437 &self,
3438 buffer_handle: ModelHandle<Buffer>,
3439 request: R,
3440 cx: &mut ModelContext<Self>,
3441 ) -> Task<Result<R::Response>>
3442 where
3443 <R::LspRequest as lsp::request::Request>::Result: Send,
3444 {
3445 let buffer = buffer_handle.read(cx);
3446 if self.is_local() {
3447 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3448 if let Some((file, (_, language_server))) =
3449 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3450 {
3451 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3452 return cx.spawn(|this, cx| async move {
3453 if !request.check_capabilities(&language_server.capabilities()) {
3454 return Ok(Default::default());
3455 }
3456
3457 let response = language_server
3458 .request::<R::LspRequest>(lsp_params)
3459 .await
3460 .context("lsp request failed")?;
3461 request
3462 .response_from_lsp(response, this, buffer_handle, cx)
3463 .await
3464 });
3465 }
3466 } else if let Some(project_id) = self.remote_id() {
3467 let rpc = self.client.clone();
3468 let message = request.to_proto(project_id, buffer);
3469 return cx.spawn(|this, cx| async move {
3470 let response = rpc.request(message).await?;
3471 request
3472 .response_from_proto(response, this, buffer_handle, cx)
3473 .await
3474 });
3475 }
3476 Task::ready(Ok(Default::default()))
3477 }
3478
3479 pub fn find_or_create_local_worktree(
3480 &mut self,
3481 abs_path: impl AsRef<Path>,
3482 visible: bool,
3483 cx: &mut ModelContext<Self>,
3484 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3485 let abs_path = abs_path.as_ref();
3486 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3487 Task::ready(Ok((tree.clone(), relative_path.into())))
3488 } else {
3489 let worktree = self.create_local_worktree(abs_path, visible, cx);
3490 cx.foreground()
3491 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3492 }
3493 }
3494
3495 pub fn find_local_worktree(
3496 &self,
3497 abs_path: &Path,
3498 cx: &AppContext,
3499 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3500 for tree in self.worktrees(cx) {
3501 if let Some(relative_path) = tree
3502 .read(cx)
3503 .as_local()
3504 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3505 {
3506 return Some((tree.clone(), relative_path.into()));
3507 }
3508 }
3509 None
3510 }
3511
3512 pub fn is_shared(&self) -> bool {
3513 match &self.client_state {
3514 ProjectClientState::Local { is_shared, .. } => *is_shared,
3515 ProjectClientState::Remote { .. } => false,
3516 }
3517 }
3518
3519 fn create_local_worktree(
3520 &mut self,
3521 abs_path: impl AsRef<Path>,
3522 visible: bool,
3523 cx: &mut ModelContext<Self>,
3524 ) -> Task<Result<ModelHandle<Worktree>>> {
3525 let fs = self.fs.clone();
3526 let client = self.client.clone();
3527 let next_entry_id = self.next_entry_id.clone();
3528 let path: Arc<Path> = abs_path.as_ref().into();
3529 let task = self
3530 .loading_local_worktrees
3531 .entry(path.clone())
3532 .or_insert_with(|| {
3533 cx.spawn(|project, mut cx| {
3534 async move {
3535 let worktree = Worktree::local(
3536 client.clone(),
3537 path.clone(),
3538 visible,
3539 fs,
3540 next_entry_id,
3541 &mut cx,
3542 )
3543 .await;
3544 project.update(&mut cx, |project, _| {
3545 project.loading_local_worktrees.remove(&path);
3546 });
3547 let worktree = worktree?;
3548
3549 let remote_project_id = project.update(&mut cx, |project, cx| {
3550 project.add_worktree(&worktree, cx);
3551 project.remote_id()
3552 });
3553
3554 if let Some(project_id) = remote_project_id {
3555 // Because sharing is async, we may have *unshared* the project by the time it completes,
3556 // in which case we need to register the worktree instead.
3557 loop {
3558 if project.read_with(&cx, |project, _| project.is_shared()) {
3559 if worktree
3560 .update(&mut cx, |worktree, cx| {
3561 worktree.as_local_mut().unwrap().share(project_id, cx)
3562 })
3563 .await
3564 .is_ok()
3565 {
3566 break;
3567 }
3568 } else {
3569 worktree
3570 .update(&mut cx, |worktree, cx| {
3571 worktree
3572 .as_local_mut()
3573 .unwrap()
3574 .register(project_id, cx)
3575 })
3576 .await?;
3577 break;
3578 }
3579 }
3580 }
3581
3582 Ok(worktree)
3583 }
3584 .map_err(Arc::new)
3585 })
3586 .shared()
3587 })
3588 .clone();
3589 cx.foreground().spawn(async move {
3590 match task.await {
3591 Ok(worktree) => Ok(worktree),
3592 Err(err) => Err(anyhow!("{}", err)),
3593 }
3594 })
3595 }
3596
3597 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3598 self.worktrees.retain(|worktree| {
3599 worktree
3600 .upgrade(cx)
3601 .map_or(false, |w| w.read(cx).id() != id)
3602 });
3603 cx.notify();
3604 }
3605
3606 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3607 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3608 if worktree.read(cx).is_local() {
3609 cx.subscribe(&worktree, |this, worktree, _, cx| {
3610 this.update_local_worktree_buffers(worktree, cx);
3611 })
3612 .detach();
3613 }
3614
3615 let push_strong_handle = {
3616 let worktree = worktree.read(cx);
3617 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3618 };
3619 if push_strong_handle {
3620 self.worktrees
3621 .push(WorktreeHandle::Strong(worktree.clone()));
3622 } else {
3623 cx.observe_release(&worktree, |this, _, cx| {
3624 this.worktrees
3625 .retain(|worktree| worktree.upgrade(cx).is_some());
3626 cx.notify();
3627 })
3628 .detach();
3629 self.worktrees
3630 .push(WorktreeHandle::Weak(worktree.downgrade()));
3631 }
3632 cx.notify();
3633 }
3634
3635 fn update_local_worktree_buffers(
3636 &mut self,
3637 worktree_handle: ModelHandle<Worktree>,
3638 cx: &mut ModelContext<Self>,
3639 ) {
3640 let snapshot = worktree_handle.read(cx).snapshot();
3641 let mut buffers_to_delete = Vec::new();
3642 let mut renamed_buffers = Vec::new();
3643 for (buffer_id, buffer) in &self.opened_buffers {
3644 if let Some(buffer) = buffer.upgrade(cx) {
3645 buffer.update(cx, |buffer, cx| {
3646 if let Some(old_file) = File::from_dyn(buffer.file()) {
3647 if old_file.worktree != worktree_handle {
3648 return;
3649 }
3650
3651 let new_file = if let Some(entry) = old_file
3652 .entry_id
3653 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3654 {
3655 File {
3656 is_local: true,
3657 entry_id: Some(entry.id),
3658 mtime: entry.mtime,
3659 path: entry.path.clone(),
3660 worktree: worktree_handle.clone(),
3661 }
3662 } else if let Some(entry) =
3663 snapshot.entry_for_path(old_file.path().as_ref())
3664 {
3665 File {
3666 is_local: true,
3667 entry_id: Some(entry.id),
3668 mtime: entry.mtime,
3669 path: entry.path.clone(),
3670 worktree: worktree_handle.clone(),
3671 }
3672 } else {
3673 File {
3674 is_local: true,
3675 entry_id: None,
3676 path: old_file.path().clone(),
3677 mtime: old_file.mtime(),
3678 worktree: worktree_handle.clone(),
3679 }
3680 };
3681
3682 let old_path = old_file.abs_path(cx);
3683 if new_file.abs_path(cx) != old_path {
3684 renamed_buffers.push((cx.handle(), old_path));
3685 }
3686
3687 if let Some(project_id) = self.remote_id() {
3688 self.client
3689 .send(proto::UpdateBufferFile {
3690 project_id,
3691 buffer_id: *buffer_id as u64,
3692 file: Some(new_file.to_proto()),
3693 })
3694 .log_err();
3695 }
3696 buffer.file_updated(Box::new(new_file), cx).detach();
3697 }
3698 });
3699 } else {
3700 buffers_to_delete.push(*buffer_id);
3701 }
3702 }
3703
3704 for buffer_id in buffers_to_delete {
3705 self.opened_buffers.remove(&buffer_id);
3706 }
3707
3708 for (buffer, old_path) in renamed_buffers {
3709 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3710 self.assign_language_to_buffer(&buffer, cx);
3711 self.register_buffer_with_language_server(&buffer, cx);
3712 }
3713 }
3714
3715 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3716 let new_active_entry = entry.and_then(|project_path| {
3717 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3718 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3719 Some(entry.id)
3720 });
3721 if new_active_entry != self.active_entry {
3722 self.active_entry = new_active_entry;
3723 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3724 }
3725 }
3726
3727 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3728 self.language_server_statuses
3729 .values()
3730 .any(|status| status.pending_diagnostic_updates > 0)
3731 }
3732
3733 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3734 let mut summary = DiagnosticSummary::default();
3735 for (_, path_summary) in self.diagnostic_summaries(cx) {
3736 summary.error_count += path_summary.error_count;
3737 summary.warning_count += path_summary.warning_count;
3738 }
3739 summary
3740 }
3741
3742 pub fn diagnostic_summaries<'a>(
3743 &'a self,
3744 cx: &'a AppContext,
3745 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3746 self.worktrees(cx).flat_map(move |worktree| {
3747 let worktree = worktree.read(cx);
3748 let worktree_id = worktree.id();
3749 worktree
3750 .diagnostic_summaries()
3751 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3752 })
3753 }
3754
3755 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3756 if self
3757 .language_server_statuses
3758 .values()
3759 .map(|status| status.pending_diagnostic_updates)
3760 .sum::<isize>()
3761 == 1
3762 {
3763 cx.emit(Event::DiskBasedDiagnosticsStarted);
3764 }
3765 }
3766
3767 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3768 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3769 if self
3770 .language_server_statuses
3771 .values()
3772 .map(|status| status.pending_diagnostic_updates)
3773 .sum::<isize>()
3774 == 0
3775 {
3776 cx.emit(Event::DiskBasedDiagnosticsFinished);
3777 }
3778 }
3779
3780 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3781 self.active_entry
3782 }
3783
3784 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3785 self.worktree_for_id(path.worktree_id, cx)?
3786 .read(cx)
3787 .entry_for_path(&path.path)
3788 .map(|entry| entry.id)
3789 }
3790
3791 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3792 let worktree = self.worktree_for_entry(entry_id, cx)?;
3793 let worktree = worktree.read(cx);
3794 let worktree_id = worktree.id();
3795 let path = worktree.entry_for_id(entry_id)?.path.clone();
3796 Some(ProjectPath { worktree_id, path })
3797 }
3798
3799 // RPC message handlers
3800
3801 async fn handle_request_join_project(
3802 this: ModelHandle<Self>,
3803 message: TypedEnvelope<proto::RequestJoinProject>,
3804 _: Arc<Client>,
3805 mut cx: AsyncAppContext,
3806 ) -> Result<()> {
3807 let user_id = message.payload.requester_id;
3808 if this.read_with(&cx, |project, _| {
3809 project.collaborators.values().any(|c| c.user.id == user_id)
3810 }) {
3811 this.update(&mut cx, |this, cx| {
3812 this.respond_to_join_request(user_id, true, cx)
3813 });
3814 } else {
3815 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3816 let user = user_store
3817 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3818 .await?;
3819 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3820 }
3821 Ok(())
3822 }
3823
3824 async fn handle_unregister_project(
3825 this: ModelHandle<Self>,
3826 _: TypedEnvelope<proto::UnregisterProject>,
3827 _: Arc<Client>,
3828 mut cx: AsyncAppContext,
3829 ) -> Result<()> {
3830 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3831 Ok(())
3832 }
3833
3834 async fn handle_project_unshared(
3835 this: ModelHandle<Self>,
3836 _: TypedEnvelope<proto::ProjectUnshared>,
3837 _: Arc<Client>,
3838 mut cx: AsyncAppContext,
3839 ) -> Result<()> {
3840 this.update(&mut cx, |this, cx| this.unshared(cx));
3841 Ok(())
3842 }
3843
3844 async fn handle_add_collaborator(
3845 this: ModelHandle<Self>,
3846 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3847 _: Arc<Client>,
3848 mut cx: AsyncAppContext,
3849 ) -> Result<()> {
3850 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3851 let collaborator = envelope
3852 .payload
3853 .collaborator
3854 .take()
3855 .ok_or_else(|| anyhow!("empty collaborator"))?;
3856
3857 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3858 this.update(&mut cx, |this, cx| {
3859 this.collaborators
3860 .insert(collaborator.peer_id, collaborator);
3861 cx.notify();
3862 });
3863
3864 Ok(())
3865 }
3866
3867 async fn handle_remove_collaborator(
3868 this: ModelHandle<Self>,
3869 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3870 _: Arc<Client>,
3871 mut cx: AsyncAppContext,
3872 ) -> Result<()> {
3873 this.update(&mut cx, |this, cx| {
3874 let peer_id = PeerId(envelope.payload.peer_id);
3875 let replica_id = this
3876 .collaborators
3877 .remove(&peer_id)
3878 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3879 .replica_id;
3880 for (_, buffer) in &this.opened_buffers {
3881 if let Some(buffer) = buffer.upgrade(cx) {
3882 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3883 }
3884 }
3885
3886 cx.emit(Event::CollaboratorLeft(peer_id));
3887 cx.notify();
3888 Ok(())
3889 })
3890 }
3891
3892 async fn handle_join_project_request_cancelled(
3893 this: ModelHandle<Self>,
3894 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
3895 _: Arc<Client>,
3896 mut cx: AsyncAppContext,
3897 ) -> Result<()> {
3898 let user = this
3899 .update(&mut cx, |this, cx| {
3900 this.user_store.update(cx, |user_store, cx| {
3901 user_store.fetch_user(envelope.payload.requester_id, cx)
3902 })
3903 })
3904 .await?;
3905
3906 this.update(&mut cx, |_, cx| {
3907 cx.emit(Event::ContactCancelledJoinRequest(user));
3908 });
3909
3910 Ok(())
3911 }
3912
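    /// Registers a remote worktree whose entry list starts out empty; its contents are
    /// filled in by subsequent worktree update messages.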
3913 async fn handle_register_worktree(
3914 this: ModelHandle<Self>,
3915 envelope: TypedEnvelope<proto::RegisterWorktree>,
3916 client: Arc<Client>,
3917 mut cx: AsyncAppContext,
3918 ) -> Result<()> {
3919 this.update(&mut cx, |this, cx| {
3920 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3921 let replica_id = this.replica_id();
3922 let worktree = proto::Worktree {
3923 id: envelope.payload.worktree_id,
3924 root_name: envelope.payload.root_name,
3925 entries: Default::default(),
3926 diagnostic_summaries: Default::default(),
3927 visible: envelope.payload.visible,
3928 scan_id: 0,
3929 };
3930 let (worktree, load_task) =
3931 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3932 this.add_worktree(&worktree, cx);
3933 load_task.detach();
3934 Ok(())
3935 })
3936 }
3937
3938 async fn handle_unregister_worktree(
3939 this: ModelHandle<Self>,
3940 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3941 _: Arc<Client>,
3942 mut cx: AsyncAppContext,
3943 ) -> Result<()> {
3944 this.update(&mut cx, |this, cx| {
3945 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3946 this.remove_worktree(worktree_id, cx);
3947 Ok(())
3948 })
3949 }
3950
3951 async fn handle_update_worktree(
3952 this: ModelHandle<Self>,
3953 envelope: TypedEnvelope<proto::UpdateWorktree>,
3954 _: Arc<Client>,
3955 mut cx: AsyncAppContext,
3956 ) -> Result<()> {
3957 this.update(&mut cx, |this, cx| {
3958 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3959 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3960 worktree.update(cx, |worktree, _| {
3961 let worktree = worktree.as_remote_mut().unwrap();
3962 worktree.update_from_remote(envelope)
3963 })?;
3964 }
3965 Ok(())
3966 })
3967 }
3968
3969 async fn handle_create_project_entry(
3970 this: ModelHandle<Self>,
3971 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3972 _: Arc<Client>,
3973 mut cx: AsyncAppContext,
3974 ) -> Result<proto::ProjectEntryResponse> {
3975 let worktree = this.update(&mut cx, |this, cx| {
3976 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3977 this.worktree_for_id(worktree_id, cx)
3978 .ok_or_else(|| anyhow!("worktree not found"))
3979 })?;
3980 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3981 let entry = worktree
3982 .update(&mut cx, |worktree, cx| {
3983 let worktree = worktree.as_local_mut().unwrap();
3984 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3985 worktree.create_entry(path, envelope.payload.is_directory, cx)
3986 })
3987 .await?;
3988 Ok(proto::ProjectEntryResponse {
3989 entry: Some((&entry).into()),
3990 worktree_scan_id: worktree_scan_id as u64,
3991 })
3992 }
3993
3994 async fn handle_rename_project_entry(
3995 this: ModelHandle<Self>,
3996 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3997 _: Arc<Client>,
3998 mut cx: AsyncAppContext,
3999 ) -> Result<proto::ProjectEntryResponse> {
4000 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4001 let worktree = this.read_with(&cx, |this, cx| {
4002 this.worktree_for_entry(entry_id, cx)
4003 .ok_or_else(|| anyhow!("worktree not found"))
4004 })?;
4005 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4006 let entry = worktree
4007 .update(&mut cx, |worktree, cx| {
4008 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4009 worktree
4010 .as_local_mut()
4011 .unwrap()
4012 .rename_entry(entry_id, new_path, cx)
4013 .ok_or_else(|| anyhow!("invalid entry"))
4014 })?
4015 .await?;
4016 Ok(proto::ProjectEntryResponse {
4017 entry: Some((&entry).into()),
4018 worktree_scan_id: worktree_scan_id as u64,
4019 })
4020 }
4021
4022 async fn handle_delete_project_entry(
4023 this: ModelHandle<Self>,
4024 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4025 _: Arc<Client>,
4026 mut cx: AsyncAppContext,
4027 ) -> Result<proto::ProjectEntryResponse> {
4028 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4029 let worktree = this.read_with(&cx, |this, cx| {
4030 this.worktree_for_entry(entry_id, cx)
4031 .ok_or_else(|| anyhow!("worktree not found"))
4032 })?;
4033 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4034 worktree
4035 .update(&mut cx, |worktree, cx| {
4036 worktree
4037 .as_local_mut()
4038 .unwrap()
4039 .delete_entry(entry_id, cx)
4040 .ok_or_else(|| anyhow!("invalid entry"))
4041 })?
4042 .await?;
4043 Ok(proto::ProjectEntryResponse {
4044 entry: None,
4045 worktree_scan_id: worktree_scan_id as u64,
4046 })
4047 }
4048
4049 async fn handle_update_diagnostic_summary(
4050 this: ModelHandle<Self>,
4051 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4052 _: Arc<Client>,
4053 mut cx: AsyncAppContext,
4054 ) -> Result<()> {
4055 this.update(&mut cx, |this, cx| {
4056 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4057 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4058 if let Some(summary) = envelope.payload.summary {
4059 let project_path = ProjectPath {
4060 worktree_id,
4061 path: Path::new(&summary.path).into(),
4062 };
4063 worktree.update(cx, |worktree, _| {
4064 worktree
4065 .as_remote_mut()
4066 .unwrap()
4067 .update_diagnostic_summary(project_path.path.clone(), &summary);
4068 });
4069 cx.emit(Event::DiagnosticsUpdated(project_path));
4070 }
4071 }
4072 Ok(())
4073 })
4074 }
4075
4076 async fn handle_start_language_server(
4077 this: ModelHandle<Self>,
4078 envelope: TypedEnvelope<proto::StartLanguageServer>,
4079 _: Arc<Client>,
4080 mut cx: AsyncAppContext,
4081 ) -> Result<()> {
4082 let server = envelope
4083 .payload
4084 .server
4085 .ok_or_else(|| anyhow!("invalid server"))?;
4086 this.update(&mut cx, |this, cx| {
4087 this.language_server_statuses.insert(
4088 server.id as usize,
4089 LanguageServerStatus {
4090 name: server.name,
4091 pending_work: Default::default(),
4092 pending_diagnostic_updates: 0,
4093 },
4094 );
4095 cx.notify();
4096 });
4097 Ok(())
4098 }
4099
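    /// Dispatches an `UpdateLanguageServer` message to the matching local handler:
    /// LSP work start/progress/end notifications, and disk-based diagnostics
    /// start/finish notifications.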
4100 async fn handle_update_language_server(
4101 this: ModelHandle<Self>,
4102 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4103 _: Arc<Client>,
4104 mut cx: AsyncAppContext,
4105 ) -> Result<()> {
4106 let language_server_id = envelope.payload.language_server_id as usize;
4107 match envelope
4108 .payload
4109 .variant
4110 .ok_or_else(|| anyhow!("invalid variant"))?
4111 {
4112 proto::update_language_server::Variant::WorkStart(payload) => {
4113 this.update(&mut cx, |this, cx| {
4114 this.on_lsp_work_start(language_server_id, payload.token, cx);
4115 })
4116 }
4117 proto::update_language_server::Variant::WorkProgress(payload) => {
4118 this.update(&mut cx, |this, cx| {
4119 this.on_lsp_work_progress(
4120 language_server_id,
4121 payload.token,
4122 LanguageServerProgress {
4123 message: payload.message,
4124 percentage: payload.percentage.map(|p| p as usize),
4125 last_update_at: Instant::now(),
4126 },
4127 cx,
4128 );
4129 })
4130 }
4131 proto::update_language_server::Variant::WorkEnd(payload) => {
4132 this.update(&mut cx, |this, cx| {
4133 this.on_lsp_work_end(language_server_id, payload.token, cx);
4134 })
4135 }
4136 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4137 this.update(&mut cx, |this, cx| {
4138 this.disk_based_diagnostics_started(cx);
4139 })
4140 }
4141 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4142 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4143 }
4144 }
4145
4146 Ok(())
4147 }
4148
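    /// Applies remote buffer operations. Operations for a buffer that is still loading
    /// are queued in [`OpenBuffer::Loading`]; operations for an unknown buffer are only
    /// accepted on remote projects, where they can arrive before the buffer's state.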
4149 async fn handle_update_buffer(
4150 this: ModelHandle<Self>,
4151 envelope: TypedEnvelope<proto::UpdateBuffer>,
4152 _: Arc<Client>,
4153 mut cx: AsyncAppContext,
4154 ) -> Result<()> {
4155 this.update(&mut cx, |this, cx| {
4156 let payload = envelope.payload.clone();
4157 let buffer_id = payload.buffer_id;
4158 let ops = payload
4159 .operations
4160 .into_iter()
4161 .map(language::proto::deserialize_operation)
4162 .collect::<Result<Vec<_>, _>>()?;
4163 let is_remote = this.is_remote();
4164 match this.opened_buffers.entry(buffer_id) {
4165 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4166 OpenBuffer::Strong(buffer) => {
4167 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4168 }
4169 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4170 OpenBuffer::Weak(_) => {}
4171 },
4172 hash_map::Entry::Vacant(e) => {
4173 assert!(
4174 is_remote,
4175 "received buffer update from {:?}",
4176 envelope.original_sender_id
4177 );
4178 e.insert(OpenBuffer::Loading(ops));
4179 }
4180 }
4181 Ok(())
4182 })
4183 }
4184
4185 async fn handle_update_buffer_file(
4186 this: ModelHandle<Self>,
4187 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4188 _: Arc<Client>,
4189 mut cx: AsyncAppContext,
4190 ) -> Result<()> {
4191 this.update(&mut cx, |this, cx| {
4192 let payload = envelope.payload.clone();
4193 let buffer_id = payload.buffer_id;
4194 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4195 let worktree = this
4196 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4197 .ok_or_else(|| anyhow!("no such worktree"))?;
4198 let file = File::from_proto(file, worktree.clone(), cx)?;
4199 let buffer = this
4200 .opened_buffers
4201 .get_mut(&buffer_id)
4202 .and_then(|b| b.upgrade(cx))
4203 .ok_or_else(|| anyhow!("no such buffer"))?;
4204 buffer.update(cx, |buffer, cx| {
4205 buffer.file_updated(Box::new(file), cx).detach();
4206 });
4207 Ok(())
4208 })
4209 }
4210
4211 async fn handle_save_buffer(
4212 this: ModelHandle<Self>,
4213 envelope: TypedEnvelope<proto::SaveBuffer>,
4214 _: Arc<Client>,
4215 mut cx: AsyncAppContext,
4216 ) -> Result<proto::BufferSaved> {
4217 let buffer_id = envelope.payload.buffer_id;
4218 let requested_version = deserialize_version(envelope.payload.version);
4219
4220 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4221 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4222 let buffer = this
4223 .opened_buffers
4224 .get(&buffer_id)
4225 .and_then(|buffer| buffer.upgrade(cx))
4226 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4227 Ok::<_, anyhow::Error>((project_id, buffer))
4228 })?;
4229 buffer
4230 .update(&mut cx, |buffer, _| {
4231 buffer.wait_for_version(requested_version)
4232 })
4233 .await;
4234
4235 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4236 Ok(proto::BufferSaved {
4237 project_id,
4238 buffer_id,
4239 version: serialize_version(&saved_version),
4240 mtime: Some(mtime.into()),
4241 })
4242 }
4243
4244 async fn handle_reload_buffers(
4245 this: ModelHandle<Self>,
4246 envelope: TypedEnvelope<proto::ReloadBuffers>,
4247 _: Arc<Client>,
4248 mut cx: AsyncAppContext,
4249 ) -> Result<proto::ReloadBuffersResponse> {
4250 let sender_id = envelope.original_sender_id()?;
4251 let reload = this.update(&mut cx, |this, cx| {
4252 let mut buffers = HashSet::default();
4253 for buffer_id in &envelope.payload.buffer_ids {
4254 buffers.insert(
4255 this.opened_buffers
4256 .get(buffer_id)
4257 .and_then(|buffer| buffer.upgrade(cx))
4258 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4259 );
4260 }
4261 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4262 })?;
4263
4264 let project_transaction = reload.await?;
4265 let project_transaction = this.update(&mut cx, |this, cx| {
4266 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4267 });
4268 Ok(proto::ReloadBuffersResponse {
4269 transaction: Some(project_transaction),
4270 })
4271 }
4272
4273 async fn handle_format_buffers(
4274 this: ModelHandle<Self>,
4275 envelope: TypedEnvelope<proto::FormatBuffers>,
4276 _: Arc<Client>,
4277 mut cx: AsyncAppContext,
4278 ) -> Result<proto::FormatBuffersResponse> {
4279 let sender_id = envelope.original_sender_id()?;
4280 let format = this.update(&mut cx, |this, cx| {
4281 let mut buffers = HashSet::default();
4282 for buffer_id in &envelope.payload.buffer_ids {
4283 buffers.insert(
4284 this.opened_buffers
4285 .get(buffer_id)
4286 .and_then(|buffer| buffer.upgrade(cx))
4287 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4288 );
4289 }
4290 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4291 })?;
4292
4293 let project_transaction = format.await?;
4294 let project_transaction = this.update(&mut cx, |this, cx| {
4295 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4296 });
4297 Ok(proto::FormatBuffersResponse {
4298 transaction: Some(project_transaction),
4299 })
4300 }
4301
4302 async fn handle_get_completions(
4303 this: ModelHandle<Self>,
4304 envelope: TypedEnvelope<proto::GetCompletions>,
4305 _: Arc<Client>,
4306 mut cx: AsyncAppContext,
4307 ) -> Result<proto::GetCompletionsResponse> {
4308 let position = envelope
4309 .payload
4310 .position
4311 .and_then(language::proto::deserialize_anchor)
4312 .ok_or_else(|| anyhow!("invalid position"))?;
4313 let version = deserialize_version(envelope.payload.version);
4314 let buffer = this.read_with(&cx, |this, cx| {
4315 this.opened_buffers
4316 .get(&envelope.payload.buffer_id)
4317 .and_then(|buffer| buffer.upgrade(cx))
4318 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4319 })?;
4320 buffer
4321 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4322 .await;
4323 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4324 let completions = this
4325 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4326 .await?;
4327
4328 Ok(proto::GetCompletionsResponse {
4329 completions: completions
4330 .iter()
4331 .map(language::proto::serialize_completion)
4332 .collect(),
4333 version: serialize_version(&version),
4334 })
4335 }
4336
4337 async fn handle_apply_additional_edits_for_completion(
4338 this: ModelHandle<Self>,
4339 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4340 _: Arc<Client>,
4341 mut cx: AsyncAppContext,
4342 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4343 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4344 let buffer = this
4345 .opened_buffers
4346 .get(&envelope.payload.buffer_id)
4347 .and_then(|buffer| buffer.upgrade(cx))
4348 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4349 let language = buffer.read(cx).language();
4350 let completion = language::proto::deserialize_completion(
4351 envelope
4352 .payload
4353 .completion
4354 .ok_or_else(|| anyhow!("invalid completion"))?,
4355 language,
4356 )?;
4357 Ok::<_, anyhow::Error>(
4358 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4359 )
4360 })?;
4361
4362 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4363 transaction: apply_additional_edits
4364 .await?
4365 .as_ref()
4366 .map(language::proto::serialize_transaction),
4367 })
4368 }
4369
4370 async fn handle_get_code_actions(
4371 this: ModelHandle<Self>,
4372 envelope: TypedEnvelope<proto::GetCodeActions>,
4373 _: Arc<Client>,
4374 mut cx: AsyncAppContext,
4375 ) -> Result<proto::GetCodeActionsResponse> {
4376 let start = envelope
4377 .payload
4378 .start
4379 .and_then(language::proto::deserialize_anchor)
4380 .ok_or_else(|| anyhow!("invalid start"))?;
4381 let end = envelope
4382 .payload
4383 .end
4384 .and_then(language::proto::deserialize_anchor)
4385 .ok_or_else(|| anyhow!("invalid end"))?;
4386 let buffer = this.update(&mut cx, |this, cx| {
4387 this.opened_buffers
4388 .get(&envelope.payload.buffer_id)
4389 .and_then(|buffer| buffer.upgrade(cx))
4390 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4391 })?;
4392 buffer
4393 .update(&mut cx, |buffer, _| {
4394 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4395 })
4396 .await;
4397
4398 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4399 let code_actions = this.update(&mut cx, |this, cx| {
4400 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4401 })?;
4402
4403 Ok(proto::GetCodeActionsResponse {
4404 actions: code_actions
4405 .await?
4406 .iter()
4407 .map(language::proto::serialize_code_action)
4408 .collect(),
4409 version: serialize_version(&version),
4410 })
4411 }
4412
4413 async fn handle_apply_code_action(
4414 this: ModelHandle<Self>,
4415 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4416 _: Arc<Client>,
4417 mut cx: AsyncAppContext,
4418 ) -> Result<proto::ApplyCodeActionResponse> {
4419 let sender_id = envelope.original_sender_id()?;
4420 let action = language::proto::deserialize_code_action(
4421 envelope
4422 .payload
4423 .action
4424 .ok_or_else(|| anyhow!("invalid action"))?,
4425 )?;
4426 let apply_code_action = this.update(&mut cx, |this, cx| {
4427 let buffer = this
4428 .opened_buffers
4429 .get(&envelope.payload.buffer_id)
4430 .and_then(|buffer| buffer.upgrade(cx))
4431 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4432 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4433 })?;
4434
4435 let project_transaction = apply_code_action.await?;
4436 let project_transaction = this.update(&mut cx, |this, cx| {
4437 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4438 });
4439 Ok(proto::ApplyCodeActionResponse {
4440 transaction: Some(project_transaction),
4441 })
4442 }
4443
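    /// Generic handler for LSP requests forwarded by a peer: deserializes the request,
    /// runs it locally via [`Self::request_lsp`], and serializes the response back for
    /// the original sender.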
4444 async fn handle_lsp_command<T: LspCommand>(
4445 this: ModelHandle<Self>,
4446 envelope: TypedEnvelope<T::ProtoRequest>,
4447 _: Arc<Client>,
4448 mut cx: AsyncAppContext,
4449 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4450 where
4451 <T::LspRequest as lsp::request::Request>::Result: Send,
4452 {
4453 let sender_id = envelope.original_sender_id()?;
4454 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4455 let buffer_handle = this.read_with(&cx, |this, _| {
4456 this.opened_buffers
4457 .get(&buffer_id)
4458 .and_then(|buffer| buffer.upgrade(&cx))
4459 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4460 })?;
4461 let request = T::from_proto(
4462 envelope.payload,
4463 this.clone(),
4464 buffer_handle.clone(),
4465 cx.clone(),
4466 )
4467 .await?;
4468 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4469 let response = this
4470 .update(&mut cx, |this, cx| {
4471 this.request_lsp(buffer_handle, request, cx)
4472 })
4473 .await?;
4474 this.update(&mut cx, |this, cx| {
4475 Ok(T::response_to_proto(
4476 response,
4477 this,
4478 sender_id,
4479 &buffer_version,
4480 cx,
4481 ))
4482 })
4483 }
4484
4485 async fn handle_get_project_symbols(
4486 this: ModelHandle<Self>,
4487 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4488 _: Arc<Client>,
4489 mut cx: AsyncAppContext,
4490 ) -> Result<proto::GetProjectSymbolsResponse> {
4491 let symbols = this
4492 .update(&mut cx, |this, cx| {
4493 this.symbols(&envelope.payload.query, cx)
4494 })
4495 .await?;
4496
4497 Ok(proto::GetProjectSymbolsResponse {
4498 symbols: symbols.iter().map(serialize_symbol).collect(),
4499 })
4500 }
4501
4502 async fn handle_search_project(
4503 this: ModelHandle<Self>,
4504 envelope: TypedEnvelope<proto::SearchProject>,
4505 _: Arc<Client>,
4506 mut cx: AsyncAppContext,
4507 ) -> Result<proto::SearchProjectResponse> {
4508 let peer_id = envelope.original_sender_id()?;
4509 let query = SearchQuery::from_proto(envelope.payload)?;
4510 let result = this
4511 .update(&mut cx, |this, cx| this.search(query, cx))
4512 .await?;
4513
4514 this.update(&mut cx, |this, cx| {
4515 let mut locations = Vec::new();
4516 for (buffer, ranges) in result {
4517 for range in ranges {
4518 let start = serialize_anchor(&range.start);
4519 let end = serialize_anchor(&range.end);
4520 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4521 locations.push(proto::Location {
4522 buffer: Some(buffer),
4523 start: Some(start),
4524 end: Some(end),
4525 });
4526 }
4527 }
4528 Ok(proto::SearchProjectResponse { locations })
4529 })
4530 }
4531
4532 async fn handle_open_buffer_for_symbol(
4533 this: ModelHandle<Self>,
4534 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4535 _: Arc<Client>,
4536 mut cx: AsyncAppContext,
4537 ) -> Result<proto::OpenBufferForSymbolResponse> {
4538 let peer_id = envelope.original_sender_id()?;
4539 let symbol = envelope
4540 .payload
4541 .symbol
4542 .ok_or_else(|| anyhow!("invalid symbol"))?;
4543 let symbol = this.read_with(&cx, |this, _| {
4544 let symbol = this.deserialize_symbol(symbol)?;
4545 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4546 if signature == symbol.signature {
4547 Ok(symbol)
4548 } else {
4549 Err(anyhow!("invalid symbol signature"))
4550 }
4551 })?;
4552 let buffer = this
4553 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4554 .await?;
4555
4556 Ok(proto::OpenBufferForSymbolResponse {
4557 buffer: Some(this.update(&mut cx, |this, cx| {
4558 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4559 })),
4560 })
4561 }
4562
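    /// Produces a SHA-256 digest over the worktree id, the path, and this project's
    /// nonce. The `OpenBufferForSymbol` handler recomputes this and compares it against
    /// the signature carried by the incoming symbol, so guests can only open buffers
    /// for symbols the host previously produced.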
4563 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4564 let mut hasher = Sha256::new();
4565 hasher.update(worktree_id.to_proto().to_be_bytes());
4566 hasher.update(path.to_string_lossy().as_bytes());
4567 hasher.update(self.nonce.to_be_bytes());
4568 hasher.finalize().as_slice().try_into().unwrap()
4569 }
4570
4571 async fn handle_open_buffer_by_id(
4572 this: ModelHandle<Self>,
4573 envelope: TypedEnvelope<proto::OpenBufferById>,
4574 _: Arc<Client>,
4575 mut cx: AsyncAppContext,
4576 ) -> Result<proto::OpenBufferResponse> {
4577 let peer_id = envelope.original_sender_id()?;
4578 let buffer = this
4579 .update(&mut cx, |this, cx| {
4580 this.open_buffer_by_id(envelope.payload.id, cx)
4581 })
4582 .await?;
4583 this.update(&mut cx, |this, cx| {
4584 Ok(proto::OpenBufferResponse {
4585 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4586 })
4587 })
4588 }
4589
4590 async fn handle_open_buffer_by_path(
4591 this: ModelHandle<Self>,
4592 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4593 _: Arc<Client>,
4594 mut cx: AsyncAppContext,
4595 ) -> Result<proto::OpenBufferResponse> {
4596 let peer_id = envelope.original_sender_id()?;
4597 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4598 let open_buffer = this.update(&mut cx, |this, cx| {
4599 this.open_buffer(
4600 ProjectPath {
4601 worktree_id,
4602 path: PathBuf::from(envelope.payload.path).into(),
4603 },
4604 cx,
4605 )
4606 });
4607
4608 let buffer = open_buffer.await?;
4609 this.update(&mut cx, |this, cx| {
4610 Ok(proto::OpenBufferResponse {
4611 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4612 })
4613 })
4614 }
4615
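    /// Converts a [`ProjectTransaction`] into its protobuf form, serializing each
    /// affected buffer for the given peer along the way.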
4616 fn serialize_project_transaction_for_peer(
4617 &mut self,
4618 project_transaction: ProjectTransaction,
4619 peer_id: PeerId,
4620 cx: &AppContext,
4621 ) -> proto::ProjectTransaction {
4622 let mut serialized_transaction = proto::ProjectTransaction {
4623 buffers: Default::default(),
4624 transactions: Default::default(),
4625 };
4626 for (buffer, transaction) in project_transaction.0 {
4627 serialized_transaction
4628 .buffers
4629 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4630 serialized_transaction
4631 .transactions
4632 .push(language::proto::serialize_transaction(&transaction));
4633 }
4634 serialized_transaction
4635 }
4636
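    /// Reconstructs a [`ProjectTransaction`] from its protobuf form, resolving each
    /// buffer, waiting for the transaction's edits to be applied locally, and
    /// optionally pushing the transactions onto the buffers' undo histories.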
4637 fn deserialize_project_transaction(
4638 &mut self,
4639 message: proto::ProjectTransaction,
4640 push_to_history: bool,
4641 cx: &mut ModelContext<Self>,
4642 ) -> Task<Result<ProjectTransaction>> {
4643 cx.spawn(|this, mut cx| async move {
4644 let mut project_transaction = ProjectTransaction::default();
4645 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4646 let buffer = this
4647 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4648 .await?;
4649 let transaction = language::proto::deserialize_transaction(transaction)?;
4650 project_transaction.0.insert(buffer, transaction);
4651 }
4652
4653 for (buffer, transaction) in &project_transaction.0 {
4654 buffer
4655 .update(&mut cx, |buffer, _| {
4656 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4657 })
4658 .await;
4659
4660 if push_to_history {
4661 buffer.update(&mut cx, |buffer, _| {
4662 buffer.push_transaction(transaction.clone(), Instant::now());
4663 });
4664 }
4665 }
4666
4667 Ok(project_transaction)
4668 })
4669 }
4670
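    /// Serializes a buffer for a given peer: the full buffer state is sent the first
    /// time a buffer is shared with that peer, and only the buffer id thereafter.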
4671 fn serialize_buffer_for_peer(
4672 &mut self,
4673 buffer: &ModelHandle<Buffer>,
4674 peer_id: PeerId,
4675 cx: &AppContext,
4676 ) -> proto::Buffer {
4677 let buffer_id = buffer.read(cx).remote_id();
4678 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4679 if shared_buffers.insert(buffer_id) {
4680 proto::Buffer {
4681 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4682 }
4683 } else {
4684 proto::Buffer {
4685 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4686 }
4687 }
4688 }
4689
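    /// Resolves a protobuf buffer into a local buffer handle. A bare id waits until the
    /// corresponding buffer has been opened locally, while a full state payload creates
    /// and registers a new buffer, associating it with its worktree file if present.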
4690 fn deserialize_buffer(
4691 &mut self,
4692 buffer: proto::Buffer,
4693 cx: &mut ModelContext<Self>,
4694 ) -> Task<Result<ModelHandle<Buffer>>> {
4695 let replica_id = self.replica_id();
4696
4697 let opened_buffer_tx = self.opened_buffer.0.clone();
4698 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4699 cx.spawn(|this, mut cx| async move {
4700 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4701 proto::buffer::Variant::Id(id) => {
4702 let buffer = loop {
4703 let buffer = this.read_with(&cx, |this, cx| {
4704 this.opened_buffers
4705 .get(&id)
4706 .and_then(|buffer| buffer.upgrade(cx))
4707 });
4708 if let Some(buffer) = buffer {
4709 break buffer;
4710 }
4711 opened_buffer_rx
4712 .next()
4713 .await
4714 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4715 };
4716 Ok(buffer)
4717 }
4718 proto::buffer::Variant::State(mut buffer) => {
4719 let mut buffer_worktree = None;
4720 let mut buffer_file = None;
4721 if let Some(file) = buffer.file.take() {
4722 this.read_with(&cx, |this, cx| {
4723 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4724 let worktree =
4725 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4726 anyhow!("no worktree found for id {}", file.worktree_id)
4727 })?;
4728 buffer_file =
4729 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4730 as Box<dyn language::File>);
4731 buffer_worktree = Some(worktree);
4732 Ok::<_, anyhow::Error>(())
4733 })?;
4734 }
4735
4736 let buffer = cx.add_model(|cx| {
4737 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4738 });
4739
4740 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4741
4742 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4743 Ok(buffer)
4744 }
4745 }
4746 })
4747 }
4748
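    /// Reconstructs a [`Symbol`] from its protobuf form. The symbol kind is transmuted
    /// directly from the wire value, which assumes it is a valid `lsp::SymbolKind`
    /// discriminant; the label is computed from the language associated with the
    /// symbol's path when one is available.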
4749 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4750 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4751 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4752 let start = serialized_symbol
4753 .start
4754 .ok_or_else(|| anyhow!("invalid start"))?;
4755 let end = serialized_symbol
4756 .end
4757 .ok_or_else(|| anyhow!("invalid end"))?;
4758 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4759 let path = PathBuf::from(serialized_symbol.path);
4760 let language = self.languages.select_language(&path);
4761 Ok(Symbol {
4762 source_worktree_id,
4763 worktree_id,
4764 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4765 label: language
4766 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4767 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4768 name: serialized_symbol.name,
4769 path,
4770 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4771 kind,
4772 signature: serialized_symbol
4773 .signature
4774 .try_into()
4775 .map_err(|_| anyhow!("invalid signature"))?,
4776 })
4777 }
4778
4779 async fn handle_buffer_saved(
4780 this: ModelHandle<Self>,
4781 envelope: TypedEnvelope<proto::BufferSaved>,
4782 _: Arc<Client>,
4783 mut cx: AsyncAppContext,
4784 ) -> Result<()> {
4785 let version = deserialize_version(envelope.payload.version);
4786 let mtime = envelope
4787 .payload
4788 .mtime
4789 .ok_or_else(|| anyhow!("missing mtime"))?
4790 .into();
4791
4792 this.update(&mut cx, |this, cx| {
4793 let buffer = this
4794 .opened_buffers
4795 .get(&envelope.payload.buffer_id)
4796 .and_then(|buffer| buffer.upgrade(cx));
4797 if let Some(buffer) = buffer {
4798 buffer.update(cx, |buffer, cx| {
4799 buffer.did_save(version, mtime, None, cx);
4800 });
4801 }
4802 Ok(())
4803 })
4804 }
4805
4806 async fn handle_buffer_reloaded(
4807 this: ModelHandle<Self>,
4808 envelope: TypedEnvelope<proto::BufferReloaded>,
4809 _: Arc<Client>,
4810 mut cx: AsyncAppContext,
4811 ) -> Result<()> {
4812 let payload = envelope.payload.clone();
4813 let version = deserialize_version(payload.version);
4814 let mtime = payload
4815 .mtime
4816 .ok_or_else(|| anyhow!("missing mtime"))?
4817 .into();
4818 this.update(&mut cx, |this, cx| {
4819 let buffer = this
4820 .opened_buffers
4821 .get(&payload.buffer_id)
4822 .and_then(|buffer| buffer.upgrade(cx));
4823 if let Some(buffer) = buffer {
4824 buffer.update(cx, |buffer, cx| {
4825 buffer.did_reload(version, mtime, cx);
4826 });
4827 }
4828 Ok(())
4829 })
4830 }
4831
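    /// Fuzzy-matches `query` against the paths of all visible worktrees, returning up
    /// to `max_results` matches. The matching work runs on the background executor and
    /// can be cancelled via `cancel_flag`.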
4832 pub fn match_paths<'a>(
4833 &self,
4834 query: &'a str,
4835 include_ignored: bool,
4836 smart_case: bool,
4837 max_results: usize,
4838 cancel_flag: &'a AtomicBool,
4839 cx: &AppContext,
4840 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4841 let worktrees = self
4842 .worktrees(cx)
4843 .filter(|worktree| worktree.read(cx).is_visible())
4844 .collect::<Vec<_>>();
4845 let include_root_name = worktrees.len() > 1;
4846 let candidate_sets = worktrees
4847 .into_iter()
4848 .map(|worktree| CandidateSet {
4849 snapshot: worktree.read(cx).snapshot(),
4850 include_ignored,
4851 include_root_name,
4852 })
4853 .collect::<Vec<_>>();
4854
4855 let background = cx.background().clone();
4856 async move {
4857 fuzzy::match_paths(
4858 candidate_sets.as_slice(),
4859 query,
4860 smart_case,
4861 max_results,
4862 cancel_flag,
4863 background,
4864 )
4865 .await
4866 }
4867 }
4868
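    /// Converts a batch of LSP text edits into anchored edits against the buffer
    /// snapshot corresponding to the language server's view of the document
    /// (`version`). Adjacent edits are coalesced, and multiline replacements are
    /// diffed against the old text so that anchors in unchanged regions stay put.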
4869 fn edits_from_lsp(
4870 &mut self,
4871 buffer: &ModelHandle<Buffer>,
4872 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4873 version: Option<i32>,
4874 cx: &mut ModelContext<Self>,
4875 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4876 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4877 cx.background().spawn(async move {
4878 let snapshot = snapshot?;
4879 let mut lsp_edits = lsp_edits
4880 .into_iter()
4881 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4882 .peekable();
4883
4884 let mut edits = Vec::new();
4885 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4886 // Combine any LSP edits that are adjacent.
4887 //
4888 // Also, combine LSP edits that are separated from each other by only
4889 // a newline. This is important because for some code actions,
4890 // rust-analyzer rewrites the entire buffer via a series of edits that
4891 // are separated by unchanged newline characters.
4892 //
4893 // In order for the diffing logic below to work properly, any edits that
4894 // cancel each other out must be combined into one.
4895 while let Some((next_range, next_text)) = lsp_edits.peek() {
4896 if next_range.start > range.end {
4897 if next_range.start.row > range.end.row + 1
4898 || next_range.start.column > 0
4899 || snapshot.clip_point_utf16(
4900 PointUtf16::new(range.end.row, u32::MAX),
4901 Bias::Left,
4902 ) > range.end
4903 {
4904 break;
4905 }
4906 new_text.push('\n');
4907 }
4908 range.end = next_range.end;
4909 new_text.push_str(&next_text);
4910 lsp_edits.next();
4911 }
4912
4913 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4914 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4915 {
4916 return Err(anyhow!("invalid edits received from language server"));
4917 }
4918
4919 // For multiline edits, perform a diff of the old and new text so that
4920 // we can identify the changes more precisely, preserving the locations
4921 // of any anchors positioned in the unchanged regions.
4922 if range.end.row > range.start.row {
4923 let mut offset = range.start.to_offset(&snapshot);
4924 let old_text = snapshot.text_for_range(range).collect::<String>();
4925
4926 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4927 let mut moved_since_edit = true;
4928 for change in diff.iter_all_changes() {
4929 let tag = change.tag();
4930 let value = change.value();
4931 match tag {
4932 ChangeTag::Equal => {
4933 offset += value.len();
4934 moved_since_edit = true;
4935 }
4936 ChangeTag::Delete => {
4937 let start = snapshot.anchor_after(offset);
4938 let end = snapshot.anchor_before(offset + value.len());
4939 if moved_since_edit {
4940 edits.push((start..end, String::new()));
4941 } else {
4942 edits.last_mut().unwrap().0.end = end;
4943 }
4944 offset += value.len();
4945 moved_since_edit = false;
4946 }
4947 ChangeTag::Insert => {
4948 if moved_since_edit {
4949 let anchor = snapshot.anchor_after(offset);
4950 edits.push((anchor.clone()..anchor, value.to_string()));
4951 } else {
4952 edits.last_mut().unwrap().1.push_str(value);
4953 }
4954 moved_since_edit = false;
4955 }
4956 }
4957 }
4958 } else if range.end == range.start {
4959 let anchor = snapshot.anchor_after(range.start);
4960 edits.push((anchor.clone()..anchor, new_text));
4961 } else {
4962 let edit_start = snapshot.anchor_after(range.start);
4963 let edit_end = snapshot.anchor_before(range.end);
4964 edits.push((edit_start..edit_end, new_text));
4965 }
4966 }
4967
4968 Ok(edits)
4969 })
4970 }
4971
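    /// Returns the buffer snapshot corresponding to the given LSP document version,
    /// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the
    /// requested one. With no version, the buffer's current text snapshot is returned.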
4972 fn buffer_snapshot_for_lsp_version(
4973 &mut self,
4974 buffer: &ModelHandle<Buffer>,
4975 version: Option<i32>,
4976 cx: &AppContext,
4977 ) -> Result<TextBufferSnapshot> {
4978 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4979
4980 if let Some(version) = version {
4981 let buffer_id = buffer.read(cx).remote_id();
4982 let snapshots = self
4983 .buffer_snapshots
4984 .get_mut(&buffer_id)
4985 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4986 let mut found_snapshot = None;
4987 snapshots.retain(|(snapshot_version, snapshot)| {
4988 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4989 false
4990 } else {
4991 if *snapshot_version == version {
4992 found_snapshot = Some(snapshot.clone());
4993 }
4994 true
4995 }
4996 });
4997
4998 found_snapshot.ok_or_else(|| {
4999 anyhow!(
5000 "snapshot not found for buffer {} at version {}",
5001 buffer_id,
5002 version
5003 )
5004 })
5005 } else {
5006 Ok(buffer.read(cx).text_snapshot())
5007 }
5008 }
5009
5010 fn language_server_for_buffer(
5011 &self,
5012 buffer: &Buffer,
5013 cx: &AppContext,
5014 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5015 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5016 let worktree_id = file.worktree_id(cx);
5017 self.language_servers
5018 .get(&(worktree_id, language.lsp_adapter()?.name()))
5019 } else {
5020 None
5021 }
5022 }
5023}
5024
5025impl WorktreeHandle {
5026 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5027 match self {
5028 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5029 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5030 }
5031 }
5032}
5033
5034impl OpenBuffer {
5035 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5036 match self {
5037 OpenBuffer::Strong(handle) => Some(handle.clone()),
5038 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5039 OpenBuffer::Loading(_) => None,
5040 }
5041 }
5042}
5043
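/// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet`
/// interface, optionally including ignored files and providing the worktree's root
/// name as a prefix for candidate paths.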
5044struct CandidateSet {
5045 snapshot: Snapshot,
5046 include_ignored: bool,
5047 include_root_name: bool,
5048}
5049
5050impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5051 type Candidates = CandidateSetIter<'a>;
5052
5053 fn id(&self) -> usize {
5054 self.snapshot.id().to_usize()
5055 }
5056
5057 fn len(&self) -> usize {
5058 if self.include_ignored {
5059 self.snapshot.file_count()
5060 } else {
5061 self.snapshot.visible_file_count()
5062 }
5063 }
5064
5065 fn prefix(&self) -> Arc<str> {
5066 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5067 self.snapshot.root_name().into()
5068 } else if self.include_root_name {
5069 format!("{}/", self.snapshot.root_name()).into()
5070 } else {
5071 "".into()
5072 }
5073 }
5074
5075 fn candidates(&'a self, start: usize) -> Self::Candidates {
5076 CandidateSetIter {
5077 traversal: self.snapshot.files(self.include_ignored, start),
5078 }
5079 }
5080}
5081
5082struct CandidateSetIter<'a> {
5083 traversal: Traversal<'a>,
5084}
5085
5086impl<'a> Iterator for CandidateSetIter<'a> {
5087 type Item = PathMatchCandidate<'a>;
5088
5089 fn next(&mut self) -> Option<Self::Item> {
5090 self.traversal.next().map(|entry| {
5091 if let EntryKind::File(char_bag) = entry.kind {
5092 PathMatchCandidate {
5093 path: &entry.path,
5094 char_bag,
5095 }
5096 } else {
5097 unreachable!()
5098 }
5099 })
5100 }
5101}
5102
5103impl Entity for Project {
5104 type Event = Event;
5105
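    /// Notifies the server when the project model is dropped: unregistering the
    /// project (if it was given a remote id) when this instance is the host, or
    /// leaving it when this instance is a guest.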
5106 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5107 match &self.client_state {
5108 ProjectClientState::Local { remote_id_rx, .. } => {
5109 if let Some(project_id) = *remote_id_rx.borrow() {
5110 self.client
5111 .send(proto::UnregisterProject { project_id })
5112 .log_err();
5113 }
5114 }
5115 ProjectClientState::Remote { remote_id, .. } => {
5116 self.client
5117 .send(proto::LeaveProject {
5118 project_id: *remote_id,
5119 })
5120 .log_err();
5121 }
5122 }
5123 }
5124
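    /// Shuts down all language servers before the application quits, waiting for every
    /// shutdown future to resolve.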
5125 fn app_will_quit(
5126 &mut self,
5127 _: &mut MutableAppContext,
5128 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5129 let shutdown_futures = self
5130 .language_servers
5131 .drain()
5132 .filter_map(|(_, (_, server))| server.shutdown())
5133 .collect::<Vec<_>>();
5134 Some(
5135 async move {
5136 futures::future::join_all(shutdown_futures).await;
5137 }
5138 .boxed(),
5139 )
5140 }
5141}
5142
5143impl Collaborator {
5144 fn from_proto(
5145 message: proto::Collaborator,
5146 user_store: &ModelHandle<UserStore>,
5147 cx: &mut AsyncAppContext,
5148 ) -> impl Future<Output = Result<Self>> {
5149 let user = user_store.update(cx, |user_store, cx| {
5150 user_store.fetch_user(message.user_id, cx)
5151 });
5152
5153 async move {
5154 Ok(Self {
5155 peer_id: PeerId(message.peer_id),
5156 user: user.await?,
5157 replica_id: message.replica_id as ReplicaId,
5158 })
5159 }
5160 }
5161}
5162
5163impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5164 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5165 Self {
5166 worktree_id,
5167 path: path.as_ref().into(),
5168 }
5169 }
5170}
5171
5172impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5173 fn from(options: lsp::CreateFileOptions) -> Self {
5174 Self {
5175 overwrite: options.overwrite.unwrap_or(false),
5176 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5177 }
5178 }
5179}
5180
5181impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5182 fn from(options: lsp::RenameFileOptions) -> Self {
5183 Self {
5184 overwrite: options.overwrite.unwrap_or(false),
5185 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5186 }
5187 }
5188}
5189
5190impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5191 fn from(options: lsp::DeleteFileOptions) -> Self {
5192 Self {
5193 recursive: options.recursive.unwrap_or(false),
5194 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5195 }
5196 }
5197}
5198
5199fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5200 proto::Symbol {
5201 source_worktree_id: symbol.source_worktree_id.to_proto(),
5202 worktree_id: symbol.worktree_id.to_proto(),
5203 language_server_name: symbol.language_server_name.0.to_string(),
5204 name: symbol.name.clone(),
5205 kind: unsafe { mem::transmute(symbol.kind) },
5206 path: symbol.path.to_string_lossy().to_string(),
5207 start: Some(proto::Point {
5208 row: symbol.range.start.row,
5209 column: symbol.range.start.column,
5210 }),
5211 end: Some(proto::Point {
5212 row: symbol.range.end.row,
5213 column: symbol.range.end.column,
5214 }),
5215 signature: symbol.signature.to_vec(),
5216 }
5217}
5218
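/// Computes the path to `path` relative to `base`, walking both component lists in
/// lockstep and emitting `..` components for the parts of `base` that `path` does not
/// share. For example, relativizing `/a/c/d` against a base of `/a/b` yields `../c/d`.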
5219fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5220 let mut path_components = path.components();
5221 let mut base_components = base.components();
5222 let mut components: Vec<Component> = Vec::new();
5223 loop {
5224 match (path_components.next(), base_components.next()) {
5225 (None, None) => break,
5226 (Some(a), None) => {
5227 components.push(a);
5228 components.extend(path_components.by_ref());
5229 break;
5230 }
5231 (None, _) => components.push(Component::ParentDir),
5232 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5233 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5234 (Some(a), Some(_)) => {
5235 components.push(Component::ParentDir);
5236 for _ in base_components {
5237 components.push(Component::ParentDir);
5238 }
5239 components.push(a);
5240 components.extend(path_components.by_ref());
5241 break;
5242 }
5243 }
5244 }
5245 components.iter().map(|c| c.as_os_str()).collect()
5246}
5247
5248impl Item for Buffer {
5249 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5250 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5251 }
5252}
5253
5254#[cfg(test)]
5255mod tests {
5256 use crate::worktree::WorktreeHandle;
5257
5258 use super::{Event, *};
5259 use fs::RealFs;
5260 use futures::{future, StreamExt};
5261 use gpui::test::subscribe;
5262 use language::{
5263 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5264 OffsetRangeExt, Point, ToPoint,
5265 };
5266 use lsp::Url;
5267 use serde_json::json;
5268 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5269 use unindent::Unindent as _;
5270 use util::{assert_set_eq, test::temp_tree};
5271
5272 #[gpui::test]
5273 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5274 let dir = temp_tree(json!({
5275 "root": {
5276 "apple": "",
5277 "banana": {
5278 "carrot": {
5279 "date": "",
5280 "endive": "",
5281 }
5282 },
5283 "fennel": {
5284 "grape": "",
5285 }
5286 }
5287 }));
5288
5289 let root_link_path = dir.path().join("root_link");
5290 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5291 unix::fs::symlink(
5292 &dir.path().join("root/fennel"),
5293 &dir.path().join("root/finnochio"),
5294 )
5295 .unwrap();
5296
5297 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
5298
5299 project.read_with(cx, |project, cx| {
5300 let tree = project.worktrees(cx).next().unwrap().read(cx);
5301 assert_eq!(tree.file_count(), 5);
5302 assert_eq!(
5303 tree.inode_for_path("fennel/grape"),
5304 tree.inode_for_path("finnochio/grape")
5305 );
5306 });
5307
5308 let cancel_flag = Default::default();
5309 let results = project
5310 .read_with(cx, |project, cx| {
5311 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5312 })
5313 .await;
5314 assert_eq!(
5315 results
5316 .into_iter()
5317 .map(|result| result.path)
5318 .collect::<Vec<Arc<Path>>>(),
5319 vec![
5320 PathBuf::from("banana/carrot/date").into(),
5321 PathBuf::from("banana/carrot/endive").into(),
5322 ]
5323 );
5324 }
5325
5326 #[gpui::test]
5327 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5328 cx.foreground().forbid_parking();
5329
5330 let mut rust_language = Language::new(
5331 LanguageConfig {
5332 name: "Rust".into(),
5333 path_suffixes: vec!["rs".to_string()],
5334 ..Default::default()
5335 },
5336 Some(tree_sitter_rust::language()),
5337 );
5338 let mut json_language = Language::new(
5339 LanguageConfig {
5340 name: "JSON".into(),
5341 path_suffixes: vec!["json".to_string()],
5342 ..Default::default()
5343 },
5344 None,
5345 );
5346 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5347 name: "the-rust-language-server",
5348 capabilities: lsp::ServerCapabilities {
5349 completion_provider: Some(lsp::CompletionOptions {
5350 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5351 ..Default::default()
5352 }),
5353 ..Default::default()
5354 },
5355 ..Default::default()
5356 });
5357 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5358 name: "the-json-language-server",
5359 capabilities: lsp::ServerCapabilities {
5360 completion_provider: Some(lsp::CompletionOptions {
5361 trigger_characters: Some(vec![":".to_string()]),
5362 ..Default::default()
5363 }),
5364 ..Default::default()
5365 },
5366 ..Default::default()
5367 });
5368
5369 let fs = FakeFs::new(cx.background());
5370 fs.insert_tree(
5371 "/the-root",
5372 json!({
5373 "test.rs": "const A: i32 = 1;",
5374 "test2.rs": "",
5375 "Cargo.toml": "a = 1",
5376 "package.json": "{\"a\": 1}",
5377 }),
5378 )
5379 .await;
5380
5381 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5382 project.update(cx, |project, _| {
5383 project.languages.add(Arc::new(rust_language));
5384 project.languages.add(Arc::new(json_language));
5385 });
5386
5387 // Open a buffer without an associated language server.
5388 let toml_buffer = project
5389 .update(cx, |project, cx| {
5390 project.open_local_buffer("/the-root/Cargo.toml", cx)
5391 })
5392 .await
5393 .unwrap();
5394
5395 // Open a buffer with an associated language server.
5396 let rust_buffer = project
5397 .update(cx, |project, cx| {
5398 project.open_local_buffer("/the-root/test.rs", cx)
5399 })
5400 .await
5401 .unwrap();
5402
5403 // A language server is started up, and it is notified about the open Rust file.
5404 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5405 assert_eq!(
5406 fake_rust_server
5407 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5408 .await
5409 .text_document,
5410 lsp::TextDocumentItem {
5411 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5412 version: 0,
5413 text: "const A: i32 = 1;".to_string(),
5414 language_id: Default::default()
5415 }
5416 );
5417
5418 // The buffer is configured based on the language server's capabilities.
5419 rust_buffer.read_with(cx, |buffer, _| {
5420 assert_eq!(
5421 buffer.completion_triggers(),
5422 &[".".to_string(), "::".to_string()]
5423 );
5424 });
5425 toml_buffer.read_with(cx, |buffer, _| {
5426 assert!(buffer.completion_triggers().is_empty());
5427 });
5428
5429 // Edit a buffer. The changes are reported to the language server.
5430 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5431 assert_eq!(
5432 fake_rust_server
5433 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5434 .await
5435 .text_document,
5436 lsp::VersionedTextDocumentIdentifier::new(
5437 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5438 1
5439 )
5440 );
5441
5442 // Open a third buffer with a different associated language server.
5443 let json_buffer = project
5444 .update(cx, |project, cx| {
5445 project.open_local_buffer("/the-root/package.json", cx)
5446 })
5447 .await
5448 .unwrap();
5449
5450 // A JSON language server is started up and is notified only about the JSON buffer.
5451 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5452 assert_eq!(
5453 fake_json_server
5454 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5455 .await
5456 .text_document,
5457 lsp::TextDocumentItem {
5458 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5459 version: 0,
5460 text: "{\"a\": 1}".to_string(),
5461 language_id: Default::default()
5462 }
5463 );
5464
5465 // This buffer is configured based on the second language server's
5466 // capabilities.
5467 json_buffer.read_with(cx, |buffer, _| {
5468 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5469 });
5470
5471 // When opening another buffer whose language server is already running,
5472 // it is also configured based on the existing language server's capabilities.
5473 let rust_buffer2 = project
5474 .update(cx, |project, cx| {
5475 project.open_local_buffer("/the-root/test2.rs", cx)
5476 })
5477 .await
5478 .unwrap();
5479 rust_buffer2.read_with(cx, |buffer, _| {
5480 assert_eq!(
5481 buffer.completion_triggers(),
5482 &[".".to_string(), "::".to_string()]
5483 );
5484 });
5485
5486 // Changes are reported only to servers matching the buffer's language.
5487 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5488 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5489 assert_eq!(
5490 fake_rust_server
5491 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5492 .await
5493 .text_document,
5494 lsp::VersionedTextDocumentIdentifier::new(
5495 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5496 1
5497 )
5498 );
5499
5500 // Save notifications are reported to all servers.
5501 toml_buffer
5502 .update(cx, |buffer, cx| buffer.save(cx))
5503 .await
5504 .unwrap();
5505 assert_eq!(
5506 fake_rust_server
5507 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5508 .await
5509 .text_document,
5510 lsp::TextDocumentIdentifier::new(
5511 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5512 )
5513 );
5514 assert_eq!(
5515 fake_json_server
5516 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5517 .await
5518 .text_document,
5519 lsp::TextDocumentIdentifier::new(
5520 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5521 )
5522 );
5523
5524 // Renames are reported only to servers matching the buffer's language.
5525 fs.rename(
5526 Path::new("/the-root/test2.rs"),
5527 Path::new("/the-root/test3.rs"),
5528 Default::default(),
5529 )
5530 .await
5531 .unwrap();
5532 assert_eq!(
5533 fake_rust_server
5534 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5535 .await
5536 .text_document,
5537 lsp::TextDocumentIdentifier::new(
5538 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5539 ),
5540 );
5541 assert_eq!(
5542 fake_rust_server
5543 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5544 .await
5545 .text_document,
5546 lsp::TextDocumentItem {
5547 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5548 version: 0,
5549 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5550 language_id: Default::default()
5551 },
5552 );
5553
5554 rust_buffer2.update(cx, |buffer, cx| {
5555 buffer.update_diagnostics(
5556 DiagnosticSet::from_sorted_entries(
5557 vec![DiagnosticEntry {
5558 diagnostic: Default::default(),
5559 range: Anchor::MIN..Anchor::MAX,
5560 }],
5561 &buffer.snapshot(),
5562 ),
5563 cx,
5564 );
5565 assert_eq!(
5566 buffer
5567 .snapshot()
5568 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5569 .count(),
5570 1
5571 );
5572 });
5573
5574 // When the rename changes the extension of the file, the buffer gets closed on the old
5575 // language server and gets opened on the new one.
5576 fs.rename(
5577 Path::new("/the-root/test3.rs"),
5578 Path::new("/the-root/test3.json"),
5579 Default::default(),
5580 )
5581 .await
5582 .unwrap();
5583 assert_eq!(
5584 fake_rust_server
5585 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5586 .await
5587 .text_document,
5588 lsp::TextDocumentIdentifier::new(
5589 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5590 ),
5591 );
5592 assert_eq!(
5593 fake_json_server
5594 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5595 .await
5596 .text_document,
5597 lsp::TextDocumentItem {
5598 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5599 version: 0,
5600 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5601 language_id: Default::default()
5602 },
5603 );
5604
5605 // We clear the diagnostics, since the language has changed.
5606 rust_buffer2.read_with(cx, |buffer, _| {
5607 assert_eq!(
5608 buffer
5609 .snapshot()
5610 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5611 .count(),
5612 0
5613 );
5614 });
5615
5616 // The renamed file's version resets after switching language servers.
5617 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5618 assert_eq!(
5619 fake_json_server
5620 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5621 .await
5622 .text_document,
5623 lsp::VersionedTextDocumentIdentifier::new(
5624 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5625 1
5626 )
5627 );
5628
5629 // Restart both language servers.
5630 project.update(cx, |project, cx| {
5631 project.restart_language_servers_for_buffers(
5632 vec![rust_buffer.clone(), json_buffer.clone()],
5633 cx,
5634 );
5635 });
5636
5637 let mut rust_shutdown_requests = fake_rust_server
5638 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5639 let mut json_shutdown_requests = fake_json_server
5640 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5641 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5642
5643 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5644 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5645
5646 // Ensure the Rust document is reopened in the new Rust language server.
5647 assert_eq!(
5648 fake_rust_server
5649 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5650 .await
5651 .text_document,
5652 lsp::TextDocumentItem {
5653 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5654 version: 1,
5655 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5656 language_id: Default::default()
5657 }
5658 );
5659
5660 // Ensure the JSON documents are reopened in the new JSON language server.
5661 assert_set_eq!(
5662 [
5663 fake_json_server
5664 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5665 .await
5666 .text_document,
5667 fake_json_server
5668 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5669 .await
5670 .text_document,
5671 ],
5672 [
5673 lsp::TextDocumentItem {
5674 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5675 version: 0,
5676 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5677 language_id: Default::default()
5678 },
5679 lsp::TextDocumentItem {
5680 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5681 version: 1,
5682 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5683 language_id: Default::default()
5684 }
5685 ]
5686 );
5687
5688 // Close notifications are reported only to servers matching the buffer's language.
5689 cx.update(|_| drop(json_buffer));
5690 let close_message = lsp::DidCloseTextDocumentParams {
5691 text_document: lsp::TextDocumentIdentifier::new(
5692 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5693 ),
5694 };
5695 assert_eq!(
5696 fake_json_server
5697 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5698 .await,
5699 close_message,
5700 );
5701 }
5702
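// Diagnostics published for files that live in separate single-file worktrees
// should be routed to the corresponding buffers.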
5703 #[gpui::test]
5704 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5705 cx.foreground().forbid_parking();
5706
5707 let fs = FakeFs::new(cx.background());
5708 fs.insert_tree(
5709 "/dir",
5710 json!({
5711 "a.rs": "let a = 1;",
5712 "b.rs": "let b = 2;"
5713 }),
5714 )
5715 .await;
5716
5717 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5718
5719 let buffer_a = project
5720 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5721 .await
5722 .unwrap();
5723 let buffer_b = project
5724 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5725 .await
5726 .unwrap();
5727
5728 project.update(cx, |project, cx| {
5729 project
5730 .update_diagnostics(
5731 lsp::PublishDiagnosticsParams {
5732 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5733 version: None,
5734 diagnostics: vec![lsp::Diagnostic {
5735 range: lsp::Range::new(
5736 lsp::Position::new(0, 4),
5737 lsp::Position::new(0, 5),
5738 ),
5739 severity: Some(lsp::DiagnosticSeverity::ERROR),
5740 message: "error 1".to_string(),
5741 ..Default::default()
5742 }],
5743 },
5744 &[],
5745 cx,
5746 )
5747 .unwrap();
5748 project
5749 .update_diagnostics(
5750 lsp::PublishDiagnosticsParams {
5751 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5752 version: None,
5753 diagnostics: vec![lsp::Diagnostic {
5754 range: lsp::Range::new(
5755 lsp::Position::new(0, 4),
5756 lsp::Position::new(0, 5),
5757 ),
5758 severity: Some(lsp::DiagnosticSeverity::WARNING),
5759 message: "error 2".to_string(),
5760 ..Default::default()
5761 }],
5762 },
5763 &[],
5764 cx,
5765 )
5766 .unwrap();
5767 });
5768
5769 buffer_a.read_with(cx, |buffer, _| {
5770 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5771 assert_eq!(
5772 chunks
5773 .iter()
5774 .map(|(s, d)| (s.as_str(), *d))
5775 .collect::<Vec<_>>(),
5776 &[
5777 ("let ", None),
5778 ("a", Some(DiagnosticSeverity::ERROR)),
5779 (" = 1;", None),
5780 ]
5781 );
5782 });
5783 buffer_b.read_with(cx, |buffer, _| {
5784 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5785 assert_eq!(
5786 chunks
5787 .iter()
5788 .map(|(s, d)| (s.as_str(), *d))
5789 .collect::<Vec<_>>(),
5790 &[
5791 ("let ", None),
5792 ("b", Some(DiagnosticSeverity::WARNING)),
5793 (" = 2;", None),
5794 ]
5795 );
5796 });
5797 }
5798
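// Exercises the disk-based diagnostics lifecycle: nested progress notifications
// using the adapter's progress token should yield a single Started/Updated/Finished
// sequence of project events, and repeated empty diagnostics should not produce
// redundant update events.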
5799 #[gpui::test]
5800 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5801 cx.foreground().forbid_parking();
5802
5803 let progress_token = "the-progress-token";
5804 let mut language = Language::new(
5805 LanguageConfig {
5806 name: "Rust".into(),
5807 path_suffixes: vec!["rs".to_string()],
5808 ..Default::default()
5809 },
5810 Some(tree_sitter_rust::language()),
5811 );
5812 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5813 disk_based_diagnostics_progress_token: Some(progress_token),
5814 disk_based_diagnostics_sources: &["disk"],
5815 ..Default::default()
5816 });
5817
5818 let fs = FakeFs::new(cx.background());
5819 fs.insert_tree(
5820 "/dir",
5821 json!({
5822 "a.rs": "fn a() { A }",
5823 "b.rs": "const y: i32 = 1",
5824 }),
5825 )
5826 .await;
5827
5828 let project = Project::test(fs, ["/dir"], cx).await;
5829 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5830 let worktree_id =
5831 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5832
5833 // Cause worktree to start the fake language server
5834 let _buffer = project
5835 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5836 .await
5837 .unwrap();
5838
5839 let mut events = subscribe(&project, cx);
5840
5841 let mut fake_server = fake_servers.next().await.unwrap();
5842 fake_server.start_progress(progress_token).await;
5843 assert_eq!(
5844 events.next().await.unwrap(),
5845 Event::DiskBasedDiagnosticsStarted
5846 );
5847
5848 fake_server.start_progress(progress_token).await;
5849 fake_server.end_progress(progress_token).await;
5850 fake_server.start_progress(progress_token).await;
5851
5852 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5853 lsp::PublishDiagnosticsParams {
5854 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5855 version: None,
5856 diagnostics: vec![lsp::Diagnostic {
5857 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5858 severity: Some(lsp::DiagnosticSeverity::ERROR),
5859 message: "undefined variable 'A'".to_string(),
5860 ..Default::default()
5861 }],
5862 },
5863 );
5864 assert_eq!(
5865 events.next().await.unwrap(),
5866 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5867 );
5868
5869 fake_server.end_progress(progress_token).await;
5870 fake_server.end_progress(progress_token).await;
5871 assert_eq!(
5872 events.next().await.unwrap(),
5873 Event::DiskBasedDiagnosticsUpdated
5874 );
5875 assert_eq!(
5876 events.next().await.unwrap(),
5877 Event::DiskBasedDiagnosticsFinished
5878 );
5879
5880 let buffer = project
5881 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5882 .await
5883 .unwrap();
5884
5885 buffer.read_with(cx, |buffer, _| {
5886 let snapshot = buffer.snapshot();
5887 let diagnostics = snapshot
5888 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5889 .collect::<Vec<_>>();
5890 assert_eq!(
5891 diagnostics,
5892 &[DiagnosticEntry {
5893 range: Point::new(0, 9)..Point::new(0, 10),
5894 diagnostic: Diagnostic {
5895 severity: lsp::DiagnosticSeverity::ERROR,
5896 message: "undefined variable 'A'".to_string(),
5897 group_id: 0,
5898 is_primary: true,
5899 ..Default::default()
5900 }
5901 }]
5902 )
5903 });
5904
5905 // Ensure publishing empty diagnostics twice only results in one update event.
5906 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5907 lsp::PublishDiagnosticsParams {
5908 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5909 version: None,
5910 diagnostics: Default::default(),
5911 },
5912 );
5913 assert_eq!(
5914 events.next().await.unwrap(),
5915 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5916 );
5917
5918 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5919 lsp::PublishDiagnosticsParams {
5920 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5921 version: None,
5922 diagnostics: Default::default(),
5923 },
5924 );
5925 cx.foreground().run_until_parked();
5926 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5927 }
5928
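// Restarting a language server while its disk-based diagnostics are still in
// progress should not leave the project stuck in a "running diagnostics" state;
// the newly started server's progress drives the events instead.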
5929 #[gpui::test]
5930 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5931 cx.foreground().forbid_parking();
5932
5933 let progress_token = "the-progress-token";
5934 let mut language = Language::new(
5935 LanguageConfig {
5936 path_suffixes: vec!["rs".to_string()],
5937 ..Default::default()
5938 },
5939 None,
5940 );
5941 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5942 disk_based_diagnostics_sources: &["disk"],
5943 disk_based_diagnostics_progress_token: Some(progress_token),
5944 ..Default::default()
5945 });
5946
5947 let fs = FakeFs::new(cx.background());
5948 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5949
5950 let project = Project::test(fs, ["/dir"], cx).await;
5951 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5952
5953 let buffer = project
5954 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5955 .await
5956 .unwrap();
5957
5958 // Simulate diagnostics starting to update.
5959 let mut fake_server = fake_servers.next().await.unwrap();
5960 fake_server.start_progress(progress_token).await;
5961
5962 // Restart the server before the diagnostics finish updating.
5963 project.update(cx, |project, cx| {
5964 project.restart_language_servers_for_buffers([buffer], cx);
5965 });
5966 let mut events = subscribe(&project, cx);
5967
5968 // Simulate the newly started server sending more diagnostics.
5969 let mut fake_server = fake_servers.next().await.unwrap();
5970 fake_server.start_progress(progress_token).await;
5971 assert_eq!(
5972 events.next().await.unwrap(),
5973 Event::DiskBasedDiagnosticsStarted
5974 );
5975
5976 // All diagnostics are considered done, despite the old server's diagnostic
5977 // task never completing.
5978 fake_server.end_progress(progress_token).await;
5979 assert_eq!(
5980 events.next().await.unwrap(),
5981 Event::DiskBasedDiagnosticsUpdated
5982 );
5983 assert_eq!(
5984 events.next().await.unwrap(),
5985 Event::DiskBasedDiagnosticsFinished
5986 );
5987 project.read_with(cx, |project, _| {
5988 assert!(!project.is_running_disk_based_diagnostics());
5989 });
5990 }
5991
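// Diagnostics reported against an older document version should be translated
// through subsequent buffer edits, overlapping diagnostics should be chunked
// correctly, and entries received out of order should be handled.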
5992 #[gpui::test]
5993 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5994 cx.foreground().forbid_parking();
5995
5996 let mut language = Language::new(
5997 LanguageConfig {
5998 name: "Rust".into(),
5999 path_suffixes: vec!["rs".to_string()],
6000 ..Default::default()
6001 },
6002 Some(tree_sitter_rust::language()),
6003 );
6004 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6005 disk_based_diagnostics_sources: &["disk"],
6006 ..Default::default()
6007 });
6008
6009 let text = "
6010 fn a() { A }
6011 fn b() { BB }
6012 fn c() { CCC }
6013 "
6014 .unindent();
6015
6016 let fs = FakeFs::new(cx.background());
6017 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6018
6019 let project = Project::test(fs, ["/dir"], cx).await;
6020 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6021
6022 let buffer = project
6023 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6024 .await
6025 .unwrap();
6026
6027 let mut fake_server = fake_servers.next().await.unwrap();
6028 let open_notification = fake_server
6029 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6030 .await;
6031
6032 // Edit the buffer, moving the content down
6033 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6034 let change_notification_1 = fake_server
6035 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6036 .await;
6037 assert!(
6038 change_notification_1.text_document.version > open_notification.text_document.version
6039 );
6040
6041 // Report some diagnostics for the initial version of the buffer
6042 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6043 lsp::PublishDiagnosticsParams {
6044 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6045 version: Some(open_notification.text_document.version),
6046 diagnostics: vec![
6047 lsp::Diagnostic {
6048 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6049 severity: Some(DiagnosticSeverity::ERROR),
6050 message: "undefined variable 'A'".to_string(),
6051 source: Some("disk".to_string()),
6052 ..Default::default()
6053 },
6054 lsp::Diagnostic {
6055 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6056 severity: Some(DiagnosticSeverity::ERROR),
6057 message: "undefined variable 'BB'".to_string(),
6058 source: Some("disk".to_string()),
6059 ..Default::default()
6060 },
6061 lsp::Diagnostic {
6062 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6063 severity: Some(DiagnosticSeverity::ERROR),
6064 source: Some("disk".to_string()),
6065 message: "undefined variable 'CCC'".to_string(),
6066 ..Default::default()
6067 },
6068 ],
6069 },
6070 );
6071
6072 // The diagnostics have moved down since they were created.
6073 buffer.next_notification(cx).await;
6074 buffer.read_with(cx, |buffer, _| {
6075 assert_eq!(
6076 buffer
6077 .snapshot()
6078 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6079 .collect::<Vec<_>>(),
6080 &[
6081 DiagnosticEntry {
6082 range: Point::new(3, 9)..Point::new(3, 11),
6083 diagnostic: Diagnostic {
6084 severity: DiagnosticSeverity::ERROR,
6085 message: "undefined variable 'BB'".to_string(),
6086 is_disk_based: true,
6087 group_id: 1,
6088 is_primary: true,
6089 ..Default::default()
6090 },
6091 },
6092 DiagnosticEntry {
6093 range: Point::new(4, 9)..Point::new(4, 12),
6094 diagnostic: Diagnostic {
6095 severity: DiagnosticSeverity::ERROR,
6096 message: "undefined variable 'CCC'".to_string(),
6097 is_disk_based: true,
6098 group_id: 2,
6099 is_primary: true,
6100 ..Default::default()
6101 }
6102 }
6103 ]
6104 );
6105 assert_eq!(
6106 chunks_with_diagnostics(buffer, 0..buffer.len()),
6107 [
6108 ("\n\nfn a() { ".to_string(), None),
6109 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6110 (" }\nfn b() { ".to_string(), None),
6111 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6112 (" }\nfn c() { ".to_string(), None),
6113 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6114 (" }\n".to_string(), None),
6115 ]
6116 );
6117 assert_eq!(
6118 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6119 [
6120 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6121 (" }\nfn c() { ".to_string(), None),
6122 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6123 ]
6124 );
6125 });
6126
6127 // Ensure overlapping diagnostics are highlighted correctly.
6128 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6129 lsp::PublishDiagnosticsParams {
6130 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6131 version: Some(open_notification.text_document.version),
6132 diagnostics: vec![
6133 lsp::Diagnostic {
6134 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6135 severity: Some(DiagnosticSeverity::ERROR),
6136 message: "undefined variable 'A'".to_string(),
6137 source: Some("disk".to_string()),
6138 ..Default::default()
6139 },
6140 lsp::Diagnostic {
6141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6142 severity: Some(DiagnosticSeverity::WARNING),
6143 message: "unreachable statement".to_string(),
6144 source: Some("disk".to_string()),
6145 ..Default::default()
6146 },
6147 ],
6148 },
6149 );
6150
6151 buffer.next_notification(cx).await;
6152 buffer.read_with(cx, |buffer, _| {
6153 assert_eq!(
6154 buffer
6155 .snapshot()
6156 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6157 .collect::<Vec<_>>(),
6158 &[
6159 DiagnosticEntry {
6160 range: Point::new(2, 9)..Point::new(2, 12),
6161 diagnostic: Diagnostic {
6162 severity: DiagnosticSeverity::WARNING,
6163 message: "unreachable statement".to_string(),
6164 is_disk_based: true,
6165 group_id: 1,
6166 is_primary: true,
6167 ..Default::default()
6168 }
6169 },
6170 DiagnosticEntry {
6171 range: Point::new(2, 9)..Point::new(2, 10),
6172 diagnostic: Diagnostic {
6173 severity: DiagnosticSeverity::ERROR,
6174 message: "undefined variable 'A'".to_string(),
6175 is_disk_based: true,
6176 group_id: 0,
6177 is_primary: true,
6178 ..Default::default()
6179 },
6180 }
6181 ]
6182 );
6183 assert_eq!(
6184 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6185 [
6186 ("fn a() { ".to_string(), None),
6187 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6188 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6189 ("\n".to_string(), None),
6190 ]
6191 );
6192 assert_eq!(
6193 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6194 [
6195 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6196 ("\n".to_string(), None),
6197 ]
6198 );
6199 });
6200
6201 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6202 // changes since the last save.
6203 buffer.update(cx, |buffer, cx| {
6204 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6205 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6206 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6207 });
6208 let change_notification_2 = fake_server
6209 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6210 .await;
6211 assert!(
6212 change_notification_2.text_document.version
6213 > change_notification_1.text_document.version
6214 );
6215
6216 // Handle diagnostic entries that are received out of order (unsorted by range)
6217 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6218 lsp::PublishDiagnosticsParams {
6219 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6220 version: Some(change_notification_2.text_document.version),
6221 diagnostics: vec![
6222 lsp::Diagnostic {
6223 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6224 severity: Some(DiagnosticSeverity::ERROR),
6225 message: "undefined variable 'BB'".to_string(),
6226 source: Some("disk".to_string()),
6227 ..Default::default()
6228 },
6229 lsp::Diagnostic {
6230 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6231 severity: Some(DiagnosticSeverity::WARNING),
6232 message: "undefined variable 'A'".to_string(),
6233 source: Some("disk".to_string()),
6234 ..Default::default()
6235 },
6236 ],
6237 },
6238 );
6239
6240 buffer.next_notification(cx).await;
6241 buffer.read_with(cx, |buffer, _| {
6242 assert_eq!(
6243 buffer
6244 .snapshot()
6245 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6246 .collect::<Vec<_>>(),
6247 &[
6248 DiagnosticEntry {
6249 range: Point::new(2, 21)..Point::new(2, 22),
6250 diagnostic: Diagnostic {
6251 severity: DiagnosticSeverity::WARNING,
6252 message: "undefined variable 'A'".to_string(),
6253 is_disk_based: true,
6254 group_id: 1,
6255 is_primary: true,
6256 ..Default::default()
6257 }
6258 },
6259 DiagnosticEntry {
6260 range: Point::new(3, 9)..Point::new(3, 14),
6261 diagnostic: Diagnostic {
6262 severity: DiagnosticSeverity::ERROR,
6263 message: "undefined variable 'BB'".to_string(),
6264 is_disk_based: true,
6265 group_id: 0,
6266 is_primary: true,
6267 ..Default::default()
6268 },
6269 }
6270 ]
6271 );
6272 });
6273 }
6274
6275 #[gpui::test]
6276 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6277 cx.foreground().forbid_parking();
6278
6279 let text = concat!(
6280 "let one = ;\n", //
6281 "let two = \n",
6282 "let three = 3;\n",
6283 );
6284
6285 let fs = FakeFs::new(cx.background());
6286 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6287
6288 let project = Project::test(fs, ["/dir"], cx).await;
6289 let buffer = project
6290 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6291 .await
6292 .unwrap();
6293
6294 project.update(cx, |project, cx| {
6295 project
6296 .update_buffer_diagnostics(
6297 &buffer,
6298 vec![
6299 DiagnosticEntry {
6300 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6301 diagnostic: Diagnostic {
6302 severity: DiagnosticSeverity::ERROR,
6303 message: "syntax error 1".to_string(),
6304 ..Default::default()
6305 },
6306 },
6307 DiagnosticEntry {
6308 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6309 diagnostic: Diagnostic {
6310 severity: DiagnosticSeverity::ERROR,
6311 message: "syntax error 2".to_string(),
6312 ..Default::default()
6313 },
6314 },
6315 ],
6316 None,
6317 cx,
6318 )
6319 .unwrap();
6320 });
6321
6322 // An empty range is extended forward to include the following character.
6323 // At the end of a line, an empty range is extended backward to include
6324 // the preceding character.
6325 buffer.read_with(cx, |buffer, _| {
6326 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6327 assert_eq!(
6328 chunks
6329 .iter()
6330 .map(|(s, d)| (s.as_str(), *d))
6331 .collect::<Vec<_>>(),
6332 &[
6333 ("let one = ", None),
6334 (";", Some(DiagnosticSeverity::ERROR)),
6335 ("\nlet two =", None),
6336 (" ", Some(DiagnosticSeverity::ERROR)),
6337 ("\nlet three = 3;\n", None)
6338 ]
6339 );
6340 });
6341 }
6342
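// `edits_from_lsp` should interpret edits against the document version the
// server last saw, translating them through buffer edits made since then.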
6343 #[gpui::test]
6344 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6345 cx.foreground().forbid_parking();
6346
6347 let mut language = Language::new(
6348 LanguageConfig {
6349 name: "Rust".into(),
6350 path_suffixes: vec!["rs".to_string()],
6351 ..Default::default()
6352 },
6353 Some(tree_sitter_rust::language()),
6354 );
6355 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6356
6357 let text = "
6358 fn a() {
6359 f1();
6360 }
6361 fn b() {
6362 f2();
6363 }
6364 fn c() {
6365 f3();
6366 }
6367 "
6368 .unindent();
6369
6370 let fs = FakeFs::new(cx.background());
6371 fs.insert_tree(
6372 "/dir",
6373 json!({
6374 "a.rs": text.clone(),
6375 }),
6376 )
6377 .await;
6378
6379 let project = Project::test(fs, ["/dir"], cx).await;
6380 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6381 let buffer = project
6382 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6383 .await
6384 .unwrap();
6385
6386 let mut fake_server = fake_servers.next().await.unwrap();
6387 let lsp_document_version = fake_server
6388 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6389 .await
6390 .text_document
6391 .version;
6392
6393 // Simulate editing the buffer after the language server computes some edits.
6394 buffer.update(cx, |buffer, cx| {
6395 buffer.edit(
6396 [(
6397 Point::new(0, 0)..Point::new(0, 0),
6398 "// above first function\n",
6399 )],
6400 cx,
6401 );
6402 buffer.edit(
6403 [(
6404 Point::new(2, 0)..Point::new(2, 0),
6405 " // inside first function\n",
6406 )],
6407 cx,
6408 );
6409 buffer.edit(
6410 [(
6411 Point::new(6, 4)..Point::new(6, 4),
6412 "// inside second function ",
6413 )],
6414 cx,
6415 );
6416
6417 assert_eq!(
6418 buffer.text(),
6419 "
6420 // above first function
6421 fn a() {
6422 // inside first function
6423 f1();
6424 }
6425 fn b() {
6426 // inside second function f2();
6427 }
6428 fn c() {
6429 f3();
6430 }
6431 "
6432 .unindent()
6433 );
6434 });
6435
6436 let edits = project
6437 .update(cx, |project, cx| {
6438 project.edits_from_lsp(
6439 &buffer,
6440 vec![
6441 // replace body of first function
6442 lsp::TextEdit {
6443 range: lsp::Range::new(
6444 lsp::Position::new(0, 0),
6445 lsp::Position::new(3, 0),
6446 ),
6447 new_text: "
6448 fn a() {
6449 f10();
6450 }
6451 "
6452 .unindent(),
6453 },
6454 // edit inside second function
6455 lsp::TextEdit {
6456 range: lsp::Range::new(
6457 lsp::Position::new(4, 6),
6458 lsp::Position::new(4, 6),
6459 ),
6460 new_text: "00".into(),
6461 },
6462 // edit inside third function via two distinct edits
6463 lsp::TextEdit {
6464 range: lsp::Range::new(
6465 lsp::Position::new(7, 5),
6466 lsp::Position::new(7, 5),
6467 ),
6468 new_text: "4000".into(),
6469 },
6470 lsp::TextEdit {
6471 range: lsp::Range::new(
6472 lsp::Position::new(7, 5),
6473 lsp::Position::new(7, 6),
6474 ),
6475 new_text: "".into(),
6476 },
6477 ],
6478 Some(lsp_document_version),
6479 cx,
6480 )
6481 })
6482 .await
6483 .unwrap();
6484
6485 buffer.update(cx, |buffer, cx| {
6486 for (range, new_text) in edits {
6487 buffer.edit([(range, new_text)], cx);
6488 }
6489 assert_eq!(
6490 buffer.text(),
6491 "
6492 // above first function
6493 fn a() {
6494 // inside first function
6495 f10();
6496 }
6497 fn b() {
6498 // inside second function f200();
6499 }
6500 fn c() {
6501 f4000();
6502 }
6503 "
6504 .unindent()
6505 );
6506 });
6507 }
6508
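// A large LSP diff that rewrites most of the file should be minimized into a
// small set of buffer edits before being applied.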
6509 #[gpui::test]
6510 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6511 cx.foreground().forbid_parking();
6512
6513 let text = "
6514 use a::b;
6515 use a::c;
6516
6517 fn f() {
6518 b();
6519 c();
6520 }
6521 "
6522 .unindent();
6523
6524 let fs = FakeFs::new(cx.background());
6525 fs.insert_tree(
6526 "/dir",
6527 json!({
6528 "a.rs": text.clone(),
6529 }),
6530 )
6531 .await;
6532
6533 let project = Project::test(fs, ["/dir"], cx).await;
6534 let buffer = project
6535 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6536 .await
6537 .unwrap();
6538
6539 // Simulate the language server sending us a small edit in the form of a very large diff.
6540 // Rust-analyzer does this when performing a merge-imports code action.
6541 let edits = project
6542 .update(cx, |project, cx| {
6543 project.edits_from_lsp(
6544 &buffer,
6545 [
6546 // Replace the first use statement without editing the semicolon.
6547 lsp::TextEdit {
6548 range: lsp::Range::new(
6549 lsp::Position::new(0, 4),
6550 lsp::Position::new(0, 8),
6551 ),
6552 new_text: "a::{b, c}".into(),
6553 },
6554 // Reinsert the remainder of the file between the semicolon and the final
6555 // newline of the file.
6556 lsp::TextEdit {
6557 range: lsp::Range::new(
6558 lsp::Position::new(0, 9),
6559 lsp::Position::new(0, 9),
6560 ),
6561 new_text: "\n\n".into(),
6562 },
6563 lsp::TextEdit {
6564 range: lsp::Range::new(
6565 lsp::Position::new(0, 9),
6566 lsp::Position::new(0, 9),
6567 ),
6568 new_text: "
6569 fn f() {
6570 b();
6571 c();
6572 }"
6573 .unindent(),
6574 },
6575 // Delete everything after the first newline of the file.
6576 lsp::TextEdit {
6577 range: lsp::Range::new(
6578 lsp::Position::new(1, 0),
6579 lsp::Position::new(7, 0),
6580 ),
6581 new_text: "".into(),
6582 },
6583 ],
6584 None,
6585 cx,
6586 )
6587 })
6588 .await
6589 .unwrap();
6590
6591 buffer.update(cx, |buffer, cx| {
6592 let edits = edits
6593 .into_iter()
6594 .map(|(range, text)| {
6595 (
6596 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6597 text,
6598 )
6599 })
6600 .collect::<Vec<_>>();
6601
6602 assert_eq!(
6603 edits,
6604 [
6605 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6606 (Point::new(1, 0)..Point::new(2, 0), "".into())
6607 ]
6608 );
6609
6610 for (range, new_text) in edits {
6611 buffer.edit([(range, new_text)], cx);
6612 }
6613 assert_eq!(
6614 buffer.text(),
6615 "
6616 use a::{b, c};
6617
6618 fn f() {
6619 b();
6620 c();
6621 }
6622 "
6623 .unindent()
6624 );
6625 });
6626 }
6627
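// Test helper: returns the buffer's chunks within `range`, coalescing adjacent
// chunks that share the same diagnostic severity.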
6628 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6629 buffer: &Buffer,
6630 range: Range<T>,
6631 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6632 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6633 for chunk in buffer.snapshot().chunks(range, true) {
6634 if chunks.last().map_or(false, |prev_chunk| {
6635 prev_chunk.1 == chunk.diagnostic_severity
6636 }) {
6637 chunks.last_mut().unwrap().0.push_str(chunk.text);
6638 } else {
6639 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6640 }
6641 }
6642 chunks
6643 }
6644
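// Fuzzy path matching in a worktree that contains only directories should
// return no results.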
6645 #[gpui::test]
6646 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6647 let dir = temp_tree(json!({
6648 "root": {
6649 "dir1": {},
6650 "dir2": {
6651 "dir3": {}
6652 }
6653 }
6654 }));
6655
6656 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6657 let cancel_flag = Default::default();
6658 let results = project
6659 .read_with(cx, |project, cx| {
6660 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6661 })
6662 .await;
6663
6664 assert!(results.is_empty());
6665 }
6666
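// Go-to-definition into a file outside the project should open the target in a
// new, invisible worktree that is released once the definition is dropped.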
6667 #[gpui::test(iterations = 10)]
6668 async fn test_definition(cx: &mut gpui::TestAppContext) {
6669 let mut language = Language::new(
6670 LanguageConfig {
6671 name: "Rust".into(),
6672 path_suffixes: vec!["rs".to_string()],
6673 ..Default::default()
6674 },
6675 Some(tree_sitter_rust::language()),
6676 );
6677 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6678
6679 let fs = FakeFs::new(cx.background());
6680 fs.insert_tree(
6681 "/dir",
6682 json!({
6683 "a.rs": "const fn a() { A }",
6684 "b.rs": "const y: i32 = crate::a()",
6685 }),
6686 )
6687 .await;
6688
6689 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6690 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6691
6692 let buffer = project
6693 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6694 .await
6695 .unwrap();
6696
6697 let fake_server = fake_servers.next().await.unwrap();
6698 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6699 let params = params.text_document_position_params;
6700 assert_eq!(
6701 params.text_document.uri.to_file_path().unwrap(),
6702 Path::new("/dir/b.rs"),
6703 );
6704 assert_eq!(params.position, lsp::Position::new(0, 22));
6705
6706 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6707 lsp::Location::new(
6708 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6709 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6710 ),
6711 )))
6712 });
6713
6714 let mut definitions = project
6715 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6716 .await
6717 .unwrap();
6718
6719 assert_eq!(definitions.len(), 1);
6720 let definition = definitions.pop().unwrap();
6721 cx.update(|cx| {
6722 let target_buffer = definition.buffer.read(cx);
6723 assert_eq!(
6724 target_buffer
6725 .file()
6726 .unwrap()
6727 .as_local()
6728 .unwrap()
6729 .abs_path(cx),
6730 Path::new("/dir/a.rs"),
6731 );
6732 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6733 assert_eq!(
6734 list_worktrees(&project, cx),
6735 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6736 );
6737
6738 drop(definition);
6739 });
6740 cx.read(|cx| {
6741 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6742 });
6743
6744 fn list_worktrees<'a>(
6745 project: &'a ModelHandle<Project>,
6746 cx: &'a AppContext,
6747 ) -> Vec<(&'a Path, bool)> {
6748 project
6749 .read(cx)
6750 .worktrees(cx)
6751 .map(|worktree| {
6752 let worktree = worktree.read(cx);
6753 (
6754 worktree.as_local().unwrap().abs_path().as_ref(),
6755 worktree.is_visible(),
6756 )
6757 })
6758 .collect::<Vec<_>>()
6759 }
6760 }
6761
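// Completion items that provide only `insert_text` and no edit range should be
// applied by replacing the partial word preceding the cursor.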
6762 #[gpui::test]
6763 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6764 let mut language = Language::new(
6765 LanguageConfig {
6766 name: "TypeScript".into(),
6767 path_suffixes: vec!["ts".to_string()],
6768 ..Default::default()
6769 },
6770 Some(tree_sitter_typescript::language_typescript()),
6771 );
6772 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6773
6774 let fs = FakeFs::new(cx.background());
6775 fs.insert_tree(
6776 "/dir",
6777 json!({
6778 "a.ts": "",
6779 }),
6780 )
6781 .await;
6782
6783 let project = Project::test(fs, ["/dir"], cx).await;
6784 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6785 let buffer = project
6786 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6787 .await
6788 .unwrap();
6789
6790 let fake_server = fake_language_servers.next().await.unwrap();
6791
6792 let text = "let a = b.fqn";
6793 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6794 let completions = project.update(cx, |project, cx| {
6795 project.completions(&buffer, text.len(), cx)
6796 });
6797
6798 fake_server
6799 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6800 Ok(Some(lsp::CompletionResponse::Array(vec![
6801 lsp::CompletionItem {
6802 label: "fullyQualifiedName?".into(),
6803 insert_text: Some("fullyQualifiedName".into()),
6804 ..Default::default()
6805 },
6806 ])))
6807 })
6808 .next()
6809 .await;
6810 let completions = completions.await.unwrap();
6811 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6812 assert_eq!(completions.len(), 1);
6813 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6814 assert_eq!(
6815 completions[0].old_range.to_offset(&snapshot),
6816 text.len() - 3..text.len()
6817 );
6818 }
6819
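// A code action that resolves to no edits should fall back to executing its
// command; edits the server then applies via `workspace/applyEdit` end up in
// the returned project transaction.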
6820 #[gpui::test(iterations = 10)]
6821 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6822 let mut language = Language::new(
6823 LanguageConfig {
6824 name: "TypeScript".into(),
6825 path_suffixes: vec!["ts".to_string()],
6826 ..Default::default()
6827 },
6828 None,
6829 );
6830 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6831
6832 let fs = FakeFs::new(cx.background());
6833 fs.insert_tree(
6834 "/dir",
6835 json!({
6836 "a.ts": "a",
6837 }),
6838 )
6839 .await;
6840
6841 let project = Project::test(fs, ["/dir"], cx).await;
6842 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6843 let buffer = project
6844 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6845 .await
6846 .unwrap();
6847
6848 let fake_server = fake_language_servers.next().await.unwrap();
6849
6850 // The language server returns code actions that contain commands, but no edits.
6851 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6852 fake_server
6853 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6854 Ok(Some(vec![
6855 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6856 title: "The code action".into(),
6857 command: Some(lsp::Command {
6858 title: "The command".into(),
6859 command: "_the/command".into(),
6860 arguments: Some(vec![json!("the-argument")]),
6861 }),
6862 ..Default::default()
6863 }),
6864 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6865 title: "two".into(),
6866 ..Default::default()
6867 }),
6868 ]))
6869 })
6870 .next()
6871 .await;
6872
6873 let action = actions.await.unwrap()[0].clone();
6874 let apply = project.update(cx, |project, cx| {
6875 project.apply_code_action(buffer.clone(), action, true, cx)
6876 });
6877
6878 // Resolving the code action does not populate its edits. In the absence of
6879 // edits, we must execute the given command.
6880 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6881 |action, _| async move { Ok(action) },
6882 );
6883
6884 // While executing the command, the language server sends the editor
6885 // a `workspace/applyEdit` request.
6886 fake_server
6887 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6888 let fake = fake_server.clone();
6889 move |params, _| {
6890 assert_eq!(params.command, "_the/command");
6891 let fake = fake.clone();
6892 async move {
6893 fake.server
6894 .request::<lsp::request::ApplyWorkspaceEdit>(
6895 lsp::ApplyWorkspaceEditParams {
6896 label: None,
6897 edit: lsp::WorkspaceEdit {
6898 changes: Some(
6899 [(
6900 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6901 vec![lsp::TextEdit {
6902 range: lsp::Range::new(
6903 lsp::Position::new(0, 0),
6904 lsp::Position::new(0, 0),
6905 ),
6906 new_text: "X".into(),
6907 }],
6908 )]
6909 .into_iter()
6910 .collect(),
6911 ),
6912 ..Default::default()
6913 },
6914 },
6915 )
6916 .await
6917 .unwrap();
6918 Ok(Some(json!(null)))
6919 }
6920 }
6921 })
6922 .next()
6923 .await;
6924
6925 // Applying the code action returns a project transaction containing the edits
6926 // sent by the language server in its `workspace/applyEdit` request.
6927 let transaction = apply.await.unwrap();
6928 assert!(transaction.0.contains_key(&buffer));
6929 buffer.update(cx, |buffer, cx| {
6930 assert_eq!(buffer.text(), "Xa");
6931 buffer.undo(cx);
6932 assert_eq!(buffer.text(), "a");
6933 });
6934 }
6935
6936 #[gpui::test]
6937 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6938 let fs = FakeFs::new(cx.background());
6939 fs.insert_tree(
6940 "/dir",
6941 json!({
6942 "file1": "the old contents",
6943 }),
6944 )
6945 .await;
6946
6947 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6948 let buffer = project
6949 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6950 .await
6951 .unwrap();
6952 buffer
6953 .update(cx, |buffer, cx| {
6954 assert_eq!(buffer.text(), "the old contents");
6955 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6956 buffer.save(cx)
6957 })
6958 .await
6959 .unwrap();
6960
6961 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6962 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6963 }
6964
6965 #[gpui::test]
6966 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6967 let fs = FakeFs::new(cx.background());
6968 fs.insert_tree(
6969 "/dir",
6970 json!({
6971 "file1": "the old contents",
6972 }),
6973 )
6974 .await;
6975
6976 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6977 let buffer = project
6978 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6979 .await
6980 .unwrap();
6981 buffer
6982 .update(cx, |buffer, cx| {
6983 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6984 buffer.save(cx)
6985 })
6986 .await
6987 .unwrap();
6988
6989 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6990 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6991 }
6992
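// Saving an untitled buffer to a path should assign it a file, clear its dirty
// state, and register it so that opening the same path returns the same buffer.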
6993 #[gpui::test]
6994 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6995 let fs = FakeFs::new(cx.background());
6996 fs.insert_tree("/dir", json!({})).await;
6997
6998 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6999 let buffer = project.update(cx, |project, cx| {
7000 project.create_buffer("", None, cx).unwrap()
7001 });
7002 buffer.update(cx, |buffer, cx| {
7003 buffer.edit([(0..0, "abc")], cx);
7004 assert!(buffer.is_dirty());
7005 assert!(!buffer.has_conflict());
7006 });
7007 project
7008 .update(cx, |project, cx| {
7009 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7010 })
7011 .await
7012 .unwrap();
7013 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7014 buffer.read_with(cx, |buffer, cx| {
7015 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7016 assert!(!buffer.is_dirty());
7017 assert!(!buffer.has_conflict());
7018 });
7019
7020 let opened_buffer = project
7021 .update(cx, |project, cx| {
7022 project.open_local_buffer("/dir/file1", cx)
7023 })
7024 .await
7025 .unwrap();
7026 assert_eq!(opened_buffer, buffer);
7027 }
7028
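// Renames and deletions on disk should preserve entry ids and buffer file
// associations, and the same changes should be reproducible on a remote
// worktree by applying a snapshot update.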
7029 #[gpui::test(retries = 5)]
7030 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7031 let dir = temp_tree(json!({
7032 "a": {
7033 "file1": "",
7034 "file2": "",
7035 "file3": "",
7036 },
7037 "b": {
7038 "c": {
7039 "file4": "",
7040 "file5": "",
7041 }
7042 }
7043 }));
7044
7045 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7046 let rpc = project.read_with(cx, |p, _| p.client.clone());
7047
7048 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7049 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7050 async move { buffer.await.unwrap() }
7051 };
7052 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7053 project.read_with(cx, |project, cx| {
7054 let tree = project.worktrees(cx).next().unwrap();
7055 tree.read(cx)
7056 .entry_for_path(path)
7057 .expect(&format!("no entry for path {}", path))
7058 .id
7059 })
7060 };
7061
7062 let buffer2 = buffer_for_path("a/file2", cx).await;
7063 let buffer3 = buffer_for_path("a/file3", cx).await;
7064 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7065 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7066
7067 let file2_id = id_for_path("a/file2", &cx);
7068 let file3_id = id_for_path("a/file3", &cx);
7069 let file4_id = id_for_path("b/c/file4", &cx);
7070
7071 // Create a remote copy of this worktree.
7072 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7073 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7074 let (remote, load_task) = cx.update(|cx| {
7075 Worktree::remote(
7076 1,
7077 1,
7078 initial_snapshot.to_proto(&Default::default(), true),
7079 rpc.clone(),
7080 cx,
7081 )
7082 });
7084 load_task.await;
7085
7086 cx.read(|cx| {
7087 assert!(!buffer2.read(cx).is_dirty());
7088 assert!(!buffer3.read(cx).is_dirty());
7089 assert!(!buffer4.read(cx).is_dirty());
7090 assert!(!buffer5.read(cx).is_dirty());
7091 });
7092
7093 // Rename and delete files and directories.
7094 tree.flush_fs_events(&cx).await;
7095 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7096 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7097 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7098 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7099 tree.flush_fs_events(&cx).await;
7100
7101 let expected_paths = vec![
7102 "a",
7103 "a/file1",
7104 "a/file2.new",
7105 "b",
7106 "d",
7107 "d/file3",
7108 "d/file4",
7109 ];
7110
7111 cx.read(|app| {
7112 assert_eq!(
7113 tree.read(app)
7114 .paths()
7115 .map(|p| p.to_str().unwrap())
7116 .collect::<Vec<_>>(),
7117 expected_paths
7118 );
7119
7120 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7121 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7122 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7123
7124 assert_eq!(
7125 buffer2.read(app).file().unwrap().path().as_ref(),
7126 Path::new("a/file2.new")
7127 );
7128 assert_eq!(
7129 buffer3.read(app).file().unwrap().path().as_ref(),
7130 Path::new("d/file3")
7131 );
7132 assert_eq!(
7133 buffer4.read(app).file().unwrap().path().as_ref(),
7134 Path::new("d/file4")
7135 );
7136 assert_eq!(
7137 buffer5.read(app).file().unwrap().path().as_ref(),
7138 Path::new("b/c/file5")
7139 );
7140
7141 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7142 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7143 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7144 assert!(buffer5.read(app).file().unwrap().is_deleted());
7145 });
7146
7147 // Update the remote worktree. Check that it becomes consistent with the
7148 // local worktree.
7149 remote.update(cx, |remote, cx| {
7150 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7151 &initial_snapshot,
7152 1,
7153 1,
7154 true,
7155 );
7156 remote
7157 .as_remote_mut()
7158 .unwrap()
7159 .snapshot
7160 .apply_remote_update(update_message)
7161 .unwrap();
7162
7163 assert_eq!(
7164 remote
7165 .paths()
7166 .map(|p| p.to_str().unwrap())
7167 .collect::<Vec<_>>(),
7168 expected_paths
7169 );
7170 });
7171 }
7172
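// Opening the same path multiple times, even concurrently, should yield a
// single shared buffer.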
7173 #[gpui::test]
7174 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7175 let fs = FakeFs::new(cx.background());
7176 fs.insert_tree(
7177 "/dir",
7178 json!({
7179 "a.txt": "a-contents",
7180 "b.txt": "b-contents",
7181 }),
7182 )
7183 .await;
7184
7185 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7186
7187 // Spawn multiple tasks to open paths, repeating some paths.
7188 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7189 (
7190 p.open_local_buffer("/dir/a.txt", cx),
7191 p.open_local_buffer("/dir/b.txt", cx),
7192 p.open_local_buffer("/dir/a.txt", cx),
7193 )
7194 });
7195
7196 let buffer_a_1 = buffer_a_1.await.unwrap();
7197 let buffer_a_2 = buffer_a_2.await.unwrap();
7198 let buffer_b = buffer_b.await.unwrap();
7199 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7200 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7201
7202 // There is only one buffer per path.
7203 let buffer_a_id = buffer_a_1.id();
7204 assert_eq!(buffer_a_2.id(), buffer_a_id);
7205
7206 // Open the same path again while it is still open.
7207 drop(buffer_a_1);
7208 let buffer_a_3 = project
7209 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7210 .await
7211 .unwrap();
7212
7213 // There's still only one buffer per path.
7214 assert_eq!(buffer_a_3.id(), buffer_a_id);
7215 }
7216
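// Tracks the buffer's dirty state and its Dirtied/Saved/FileHandleChanged
// events across edits, saves, and file deletions.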
7217 #[gpui::test]
7218 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7219 let fs = FakeFs::new(cx.background());
7220 fs.insert_tree(
7221 "/dir",
7222 json!({
7223 "file1": "abc",
7224 "file2": "def",
7225 "file3": "ghi",
7226 }),
7227 )
7228 .await;
7229
7230 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7231
7232 let buffer1 = project
7233 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7234 .await
7235 .unwrap();
7236 let events = Rc::new(RefCell::new(Vec::new()));
7237
7238 // Initially, the buffer isn't dirty.
7239 buffer1.update(cx, |buffer, cx| {
7240 cx.subscribe(&buffer1, {
7241 let events = events.clone();
7242 move |_, _, event, _| match event {
7243 BufferEvent::Operation(_) => {}
7244 _ => events.borrow_mut().push(event.clone()),
7245 }
7246 })
7247 .detach();
7248
7249 assert!(!buffer.is_dirty());
7250 assert!(events.borrow().is_empty());
7251
7252 buffer.edit([(1..2, "")], cx);
7253 });
7254
7255 // After the first edit, the buffer is dirty and emits a Dirtied event.
7256 buffer1.update(cx, |buffer, cx| {
7257 assert!(buffer.text() == "ac");
7258 assert!(buffer.is_dirty());
7259 assert_eq!(
7260 *events.borrow(),
7261 &[language::Event::Edited, language::Event::Dirtied]
7262 );
7263 events.borrow_mut().clear();
7264 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7265 });
7266
7267 // After saving, the buffer is not dirty and emits a Saved event.
7268 buffer1.update(cx, |buffer, cx| {
7269 assert!(!buffer.is_dirty());
7270 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7271 events.borrow_mut().clear();
7272
7273 buffer.edit([(1..1, "B")], cx);
7274 buffer.edit([(2..2, "D")], cx);
7275 });
7276
7277 // After editing again, the buffer is dirty and emits another Dirtied event.
7278 buffer1.update(cx, |buffer, cx| {
7279 assert!(buffer.text() == "aBDc");
7280 assert!(buffer.is_dirty());
7281 assert_eq!(
7282 *events.borrow(),
7283 &[
7284 language::Event::Edited,
7285 language::Event::Dirtied,
7286 language::Event::Edited,
7287 ],
7288 );
7289 events.borrow_mut().clear();
7290
7291 // TODO - currently, after restoring the buffer to its
7292 // previously-saved state, the buffer is still considered dirty.
7293 buffer.edit([(1..3, "")], cx);
7294 assert!(buffer.text() == "ac");
7295 assert!(buffer.is_dirty());
7296 });
7297
7298 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7299
7300 // When a file is deleted, the buffer is considered dirty.
7301 let events = Rc::new(RefCell::new(Vec::new()));
7302 let buffer2 = project
7303 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7304 .await
7305 .unwrap();
7306 buffer2.update(cx, |_, cx| {
7307 cx.subscribe(&buffer2, {
7308 let events = events.clone();
7309 move |_, _, event, _| events.borrow_mut().push(event.clone())
7310 })
7311 .detach();
7312 });
7313
7314 fs.remove_file("/dir/file2".as_ref(), Default::default())
7315 .await
7316 .unwrap();
7317 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7318 assert_eq!(
7319 *events.borrow(),
7320 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7321 );
7322
7323 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7324 let events = Rc::new(RefCell::new(Vec::new()));
7325 let buffer3 = project
7326 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7327 .await
7328 .unwrap();
7329 buffer3.update(cx, |_, cx| {
7330 cx.subscribe(&buffer3, {
7331 let events = events.clone();
7332 move |_, _, event, _| events.borrow_mut().push(event.clone())
7333 })
7334 .detach();
7335 });
7336
7337 buffer3.update(cx, |buffer, cx| {
7338 buffer.edit([(0..0, "x")], cx);
7339 });
7340 events.borrow_mut().clear();
7341 fs.remove_file("/dir/file3".as_ref(), Default::default())
7342 .await
7343 .unwrap();
7344 buffer3
7345 .condition(&cx, |_, _| !events.borrow().is_empty())
7346 .await;
7347 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7348 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7349 }
7350
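// An unmodified buffer should reload its contents from disk as a diff, carrying
// anchors across the reload; a modified buffer should instead be marked as
// conflicted.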
7351 #[gpui::test]
7352 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7353 let initial_contents = "aaa\nbbbbb\nc\n";
7354 let fs = FakeFs::new(cx.background());
7355 fs.insert_tree(
7356 "/dir",
7357 json!({
7358 "the-file": initial_contents,
7359 }),
7360 )
7361 .await;
7362 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7363 let buffer = project
7364 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7365 .await
7366 .unwrap();
7367
7368 let anchors = (0..3)
7369 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7370 .collect::<Vec<_>>();
7371
7372 // Change the file on disk, adding two new lines of text, and removing
7373 // one line.
7374 buffer.read_with(cx, |buffer, _| {
7375 assert!(!buffer.is_dirty());
7376 assert!(!buffer.has_conflict());
7377 });
7378 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7379 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7380 .await
7381 .unwrap();
7382
7383 // Because the buffer was not modified, it is reloaded from disk. Its
7384 // contents are edited according to the diff between the old and new
7385 // file contents.
7386 buffer
7387 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7388 .await;
7389
7390 buffer.update(cx, |buffer, _| {
7391 assert_eq!(buffer.text(), new_contents);
7392 assert!(!buffer.is_dirty());
7393 assert!(!buffer.has_conflict());
7394
7395 let anchor_positions = anchors
7396 .iter()
7397 .map(|anchor| anchor.to_point(&*buffer))
7398 .collect::<Vec<_>>();
7399 assert_eq!(
7400 anchor_positions,
7401 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7402 );
7403 });
7404
7405 // Modify the buffer
7406 buffer.update(cx, |buffer, cx| {
7407 buffer.edit([(0..0, " ")], cx);
7408 assert!(buffer.is_dirty());
7409 assert!(!buffer.has_conflict());
7410 });
7411
7412 // Change the file on disk again, adding blank lines to the beginning.
7413 fs.save(
7414 "/dir/the-file".as_ref(),
7415 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7416 )
7417 .await
7418 .unwrap();
7419
7420 // Because the buffer is modified, it doesn't reload from disk, but is
7421 // marked as having a conflict.
7422 buffer
7423 .condition(&cx, |buffer, _| buffer.has_conflict())
7424 .await;
7425 }
7426
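// Diagnostics linked via `relatedInformation` should be grouped together, with
// one primary entry per group retrievable through `diagnostic_group`.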
7427 #[gpui::test]
7428 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7429 cx.foreground().forbid_parking();
7430
7431 let fs = FakeFs::new(cx.background());
7432 fs.insert_tree(
7433 "/the-dir",
7434 json!({
7435 "a.rs": "
7436 fn foo(mut v: Vec<usize>) {
7437 for x in &v {
7438 v.push(1);
7439 }
7440 }
7441 "
7442 .unindent(),
7443 }),
7444 )
7445 .await;
7446
7447 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7448 let buffer = project
7449 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7450 .await
7451 .unwrap();
7452
7453 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7454 let message = lsp::PublishDiagnosticsParams {
7455 uri: buffer_uri.clone(),
7456 diagnostics: vec![
7457 lsp::Diagnostic {
7458 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7459 severity: Some(DiagnosticSeverity::WARNING),
7460 message: "error 1".to_string(),
7461 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7462 location: lsp::Location {
7463 uri: buffer_uri.clone(),
7464 range: lsp::Range::new(
7465 lsp::Position::new(1, 8),
7466 lsp::Position::new(1, 9),
7467 ),
7468 },
7469 message: "error 1 hint 1".to_string(),
7470 }]),
7471 ..Default::default()
7472 },
7473 lsp::Diagnostic {
7474 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7475 severity: Some(DiagnosticSeverity::HINT),
7476 message: "error 1 hint 1".to_string(),
7477 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7478 location: lsp::Location {
7479 uri: buffer_uri.clone(),
7480 range: lsp::Range::new(
7481 lsp::Position::new(1, 8),
7482 lsp::Position::new(1, 9),
7483 ),
7484 },
7485 message: "original diagnostic".to_string(),
7486 }]),
7487 ..Default::default()
7488 },
7489 lsp::Diagnostic {
7490 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7491 severity: Some(DiagnosticSeverity::ERROR),
7492 message: "error 2".to_string(),
7493 related_information: Some(vec![
7494 lsp::DiagnosticRelatedInformation {
7495 location: lsp::Location {
7496 uri: buffer_uri.clone(),
7497 range: lsp::Range::new(
7498 lsp::Position::new(1, 13),
7499 lsp::Position::new(1, 15),
7500 ),
7501 },
7502 message: "error 2 hint 1".to_string(),
7503 },
7504 lsp::DiagnosticRelatedInformation {
7505 location: lsp::Location {
7506 uri: buffer_uri.clone(),
7507 range: lsp::Range::new(
7508 lsp::Position::new(1, 13),
7509 lsp::Position::new(1, 15),
7510 ),
7511 },
7512 message: "error 2 hint 2".to_string(),
7513 },
7514 ]),
7515 ..Default::default()
7516 },
7517 lsp::Diagnostic {
7518 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7519 severity: Some(DiagnosticSeverity::HINT),
7520 message: "error 2 hint 1".to_string(),
7521 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7522 location: lsp::Location {
7523 uri: buffer_uri.clone(),
7524 range: lsp::Range::new(
7525 lsp::Position::new(2, 8),
7526 lsp::Position::new(2, 17),
7527 ),
7528 },
7529 message: "original diagnostic".to_string(),
7530 }]),
7531 ..Default::default()
7532 },
7533 lsp::Diagnostic {
7534 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7535 severity: Some(DiagnosticSeverity::HINT),
7536 message: "error 2 hint 2".to_string(),
7537 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7538 location: lsp::Location {
7539 uri: buffer_uri.clone(),
7540 range: lsp::Range::new(
7541 lsp::Position::new(2, 8),
7542 lsp::Position::new(2, 17),
7543 ),
7544 },
7545 message: "original diagnostic".to_string(),
7546 }]),
7547 ..Default::default()
7548 },
7549 ],
7550 version: None,
7551 };
7552
7553 project
7554 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7555 .unwrap();
7556 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7557
7558 assert_eq!(
7559 buffer
7560 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7561 .collect::<Vec<_>>(),
7562 &[
7563 DiagnosticEntry {
7564 range: Point::new(1, 8)..Point::new(1, 9),
7565 diagnostic: Diagnostic {
7566 severity: DiagnosticSeverity::WARNING,
7567 message: "error 1".to_string(),
7568 group_id: 0,
7569 is_primary: true,
7570 ..Default::default()
7571 }
7572 },
7573 DiagnosticEntry {
7574 range: Point::new(1, 8)..Point::new(1, 9),
7575 diagnostic: Diagnostic {
7576 severity: DiagnosticSeverity::HINT,
7577 message: "error 1 hint 1".to_string(),
7578 group_id: 0,
7579 is_primary: false,
7580 ..Default::default()
7581 }
7582 },
7583 DiagnosticEntry {
7584 range: Point::new(1, 13)..Point::new(1, 15),
7585 diagnostic: Diagnostic {
7586 severity: DiagnosticSeverity::HINT,
7587 message: "error 2 hint 1".to_string(),
7588 group_id: 1,
7589 is_primary: false,
7590 ..Default::default()
7591 }
7592 },
7593 DiagnosticEntry {
7594 range: Point::new(1, 13)..Point::new(1, 15),
7595 diagnostic: Diagnostic {
7596 severity: DiagnosticSeverity::HINT,
7597 message: "error 2 hint 2".to_string(),
7598 group_id: 1,
7599 is_primary: false,
7600 ..Default::default()
7601 }
7602 },
7603 DiagnosticEntry {
7604 range: Point::new(2, 8)..Point::new(2, 17),
7605 diagnostic: Diagnostic {
7606 severity: DiagnosticSeverity::ERROR,
7607 message: "error 2".to_string(),
7608 group_id: 1,
7609 is_primary: true,
7610 ..Default::default()
7611 }
7612 }
7613 ]
7614 );
7615
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

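    // Renaming goes through two LSP round trips: `textDocument/prepareRename` to
    // resolve the range being renamed, then `textDocument/rename` to compute the
    // workspace edit. Both are answered here by the fake language server.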
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

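        // Perform the rename, replying from the fake server with a workspace edit
        // that touches both files in the project.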
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
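        // The rename yields a project transaction whose entries cover both buffers,
        // with the edits already applied to their contents.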
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

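    // Project-wide search should reflect both on-disk file contents and unsaved
    // edits in open buffers.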
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

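        // Edit an open buffer without saving; the next search should pick up the
        // in-memory occurrences rather than the stale on-disk text.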
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

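        // Runs a project-wide search and flattens the results into a map from file
        // paths to matching offset ranges, which is easier to assert against.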
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}