1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use postage::watch;
27use rand::prelude::*;
28use search::SearchQuery;
29use sha2::{Digest, Sha256};
30use similar::{ChangeTag, TextDiff};
31use smol::block_on;
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{atomic::AtomicBool, Arc},
42 time::Instant,
43};
44use util::{post_inc, ResultExt, TryFutureExt as _};
45
46pub use fs::*;
47pub use worktree::*;
48
/// Central state for one open project: its worktrees, open buffers,
/// language servers, and collaboration/RPC bookkeeping.
pub struct Project {
    // May hold weak handles (unshared local project) or strong ones
    // (shared or remote) — see `share`/`unshare`.
    worktrees: Vec<WorktreeHandle>,
    // The most recently activated entry, if any.
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by (worktree, language name).
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is hosted locally or joined as a remote guest.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Number of language servers currently producing disk-based diagnostics.
    language_servers_with_diagnostics_running: isize,
    // Watch channel signaled whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids that have been sent to each remote peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Paths whose buffers are currently being loaded; concurrent opens wait
    // on the receiver so each path is only loaded once.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Local worktrees currently being created, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by their remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer history of (version, snapshot) pairs reported to language
    // servers; used to compute incremental `didChange` events.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value generated at construction time.
    nonce: u128,
}
74
/// The project's handle on an open buffer.
enum OpenBuffer {
    /// Keeps the buffer alive (used while the project is shared or remote).
    Strong(ModelHandle<Buffer>),
    /// Lets the buffer drop once nothing else references it.
    Weak(WeakModelHandle<Buffer>),
    /// Buffer is still opening; accumulates operations that arrive early and
    /// are applied once registration completes (see `register_buffer`).
    Loading(Vec<Operation>),
}
80
/// Strong or weak reference to a worktree; strengthened when the project is
/// shared and weakened again on unshare (for invisible worktrees).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
85
/// Distinguishes a project hosted by this client from one joined remotely.
enum ProjectClientState {
    Local {
        /// Whether the project is currently shared with collaborators.
        is_shared: bool,
        /// Channel carrying the server-assigned project id; `None` while
        /// disconnected.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Background task that re-registers the project as the connection
        /// status changes (see `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing; makes the project read-only
        /// (see `is_read_only`).
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Background task that flags the project as unshared when the
        /// connection drops (see `Project::remote`).
        _detect_unshare_task: Task<Option<()>>,
    },
}
100
/// Another participant in a shared project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
107
/// Events emitted by a `Project` for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    // The following three track the lifecycle of disk-based diagnostics
    // (cf. `language_servers_with_diagnostics_running`).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}
117
/// A worktree-relative path, qualified by the worktree it belongs to.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
123
/// Counts of primary diagnostics, grouped by severity (see
/// `DiagnosticSummary::new`).
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
131
/// An anchored range within a specific buffer (e.g. a definition or
/// reference target).
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
137
/// A highlighted range in a document, as reported by an LSP
/// `textDocument/documentHighlight` request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    /// Whether the occurrence is a read, a write, or plain text.
    pub kind: DocumentHighlightKind,
}
143
/// A project-wide symbol (workspace/symbol result).
#[derive(Clone, Debug)]
pub struct Symbol {
    // NOTE(review): the distinction between `source_worktree_id` and
    // `worktree_id` isn't visible in this file — presumably the worktree
    // whose language server reported the symbol vs. the worktree containing
    // the symbol's file. Confirm against call sites.
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte signature; likely a SHA-256 digest (`sha2` is imported at the
    // top of this file) — TODO confirm where it is computed.
    pub signature: [u8; 32],
}
156
/// Maps each buffer touched by a multi-buffer operation to the transaction
/// that was applied to it.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
159
160impl DiagnosticSummary {
161 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
162 let mut this = Self {
163 error_count: 0,
164 warning_count: 0,
165 info_count: 0,
166 hint_count: 0,
167 };
168
169 for entry in diagnostics {
170 if entry.diagnostic.is_primary {
171 match entry.diagnostic.severity {
172 DiagnosticSeverity::ERROR => this.error_count += 1,
173 DiagnosticSeverity::WARNING => this.warning_count += 1,
174 DiagnosticSeverity::INFORMATION => this.info_count += 1,
175 DiagnosticSeverity::HINT => this.hint_count += 1,
176 _ => {}
177 }
178 }
179 }
180
181 this
182 }
183
184 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
185 proto::DiagnosticSummary {
186 path: path.to_string_lossy().to_string(),
187 error_count: self.error_count as u32,
188 warning_count: self.warning_count as u32,
189 info_count: self.info_count as u32,
190 hint_count: self.hint_count as u32,
191 }
192 }
193}
194
/// Identifies a single entry (file or directory) within a worktree.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
200
201impl Project {
    /// Registers all of `Project`'s RPC handlers on `client`. Called once at
    /// startup, before any project is created.
    pub fn init(client: &Arc<Client>) {
        // Fire-and-forget message handlers.
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        // Request handlers (a response is sent back to the peer).
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, parameterized by the
        // request type.
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
232
    /// Creates a new local (host-side) project, spawning a background task
    /// that registers the project with the server whenever the client is
    /// connected and clears its remote id when it is not.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        // React to every connection-status change for the
                        // lifetime of the project.
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                // While connected, ask the server for a
                                // project id; otherwise drop it.
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under
                                    // the new project id before publishing it.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
311
    /// Joins an existing project hosted by another client: connects, issues
    /// `JoinProject`, reconstructs the host's worktrees, and populates the
    /// collaborator list from the response.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        // Build a remote worktree per worktree in the response; their load
        // tasks are detached and complete in the background.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watches the connection status and marks the project as
                    // unshared (read-only) on any disconnect.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Load the collaborating users before building `Collaborator`
        // entries from the response.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
406
    /// Builds a local project suitable for tests, backed by a fake HTTP
    /// client that returns 404 for every request.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
415
416 #[cfg(any(test, feature = "test-support"))]
417 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
418 self.opened_buffers
419 .get(&remote_id)
420 .and_then(|buffer| buffer.upgrade(cx))
421 }
422
    /// The language registry backing this project.
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
427
    /// Test-only sanity checks: local projects must not contain two
    /// worktrees with the same absolute path; on guest replicas, no open
    /// buffer may have deferred (unapplied) operations.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            // Every worktree's absolute path must be unique.
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            // Remote: all received operations must have been applied.
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
461
462 #[cfg(any(test, feature = "test-support"))]
463 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
464 let path = path.into();
465 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
466 self.opened_buffers.iter().any(|(_, buffer)| {
467 if let Some(buffer) = buffer.upgrade(cx) {
468 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
469 if file.worktree == worktree && file.path() == &path.path {
470 return true;
471 }
472 }
473 }
474 false
475 })
476 } else {
477 false
478 }
479 }
480
    /// The filesystem implementation this project performs I/O through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
484
485 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
486 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
487 *remote_id_tx.borrow_mut() = remote_id;
488 }
489
490 self.subscriptions.clear();
491 if let Some(remote_id) = remote_id {
492 self.subscriptions
493 .push(self.client.add_model_for_remote_entity(remote_id, cx));
494 }
495 }
496
497 pub fn remote_id(&self) -> Option<u64> {
498 match &self.client_state {
499 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
500 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
501 }
502 }
503
504 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
505 let mut id = None;
506 let mut watch = None;
507 match &self.client_state {
508 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
509 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
510 }
511
512 async move {
513 if let Some(id) = id {
514 return id;
515 }
516 let mut watch = watch.unwrap();
517 loop {
518 let id = *watch.borrow();
519 if let Some(id) = id {
520 return id;
521 }
522 watch.next().await;
523 }
524 }
525 }
526
527 pub fn replica_id(&self) -> ReplicaId {
528 match &self.client_state {
529 ProjectClientState::Local { .. } => 0,
530 ProjectClientState::Remote { replica_id, .. } => *replica_id,
531 }
532 }
533
    /// The current collaborators in this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
537
    /// Iterates over all live worktrees, silently skipping handles whose
    /// worktree has been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
546
547 pub fn visible_worktrees<'a>(
548 &'a self,
549 cx: &'a AppContext,
550 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
551 self.worktrees.iter().filter_map(|worktree| {
552 worktree.upgrade(cx).and_then(|worktree| {
553 if worktree.read(cx).is_visible() {
554 Some(worktree)
555 } else {
556 None
557 }
558 })
559 })
560 }
561
562 pub fn worktree_for_id(
563 &self,
564 id: WorktreeId,
565 cx: &AppContext,
566 ) -> Option<ModelHandle<Worktree>> {
567 self.worktrees(cx)
568 .find(|worktree| worktree.read(cx).id() == id)
569 }
570
    /// Shares this local project with collaborators: strengthens all buffer
    /// and worktree handles so they stay alive while shared, requests
    /// `ShareProject` from the server, then shares each worktree.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Upgrade weak buffer handles so guests can access them.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise keep all worktrees alive while shared.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share every worktree and wait for all of them to finish.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
632
    /// Stops sharing this local project: weakens buffer handles (and
    /// handles of invisible worktrees), notifies the server, and clears
    /// collaborator/shared-buffer state.
    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;

                    // Buffers no longer need to be kept alive for guests.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(buffer) => {
                                *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                            }
                            _ => {}
                        }
                    }

                    // Visible worktrees stay strong; only invisible ones
                    // are weakened.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(worktree) => {
                                if !worktree.read(cx).is_visible() {
                                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                                }
                            }
                            _ => {}
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }
687
688 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
689 if let ProjectClientState::Remote {
690 sharing_has_stopped,
691 ..
692 } = &mut self.client_state
693 {
694 *sharing_has_stopped = true;
695 self.collaborators.clear();
696 cx.notify();
697 }
698 }
699
700 pub fn is_read_only(&self) -> bool {
701 match &self.client_state {
702 ProjectClientState::Local { .. } => false,
703 ProjectClientState::Remote {
704 sharing_has_stopped,
705 ..
706 } => *sharing_has_stopped,
707 }
708 }
709
710 pub fn is_local(&self) -> bool {
711 match &self.client_state {
712 ProjectClientState::Local { .. } => true,
713 ProjectClientState::Remote { .. } => false,
714 }
715 }
716
717 pub fn is_remote(&self) -> bool {
718 !self.is_local()
719 }
720
721 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
722 if self.is_remote() {
723 return Err(anyhow!("creating buffers as a guest is not supported yet"));
724 }
725
726 let buffer = cx.add_model(|cx| {
727 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
728 });
729 self.register_buffer(&buffer, cx)?;
730 Ok(buffer)
731 }
732
    /// Opens the buffer for `path`, deduplicating concurrent requests: an
    /// already-open buffer is returned immediately, an in-flight load is
    /// awaited, and otherwise a new local or remote load is started.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the watch carries the load result (Ok or Err).
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
793
    /// Loads a buffer from a local worktree and registers it with the
    /// project. Panics (via `unwrap`) if the worktree is not local.
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
810
    /// Requests a buffer from the host over RPC and deserializes it into a
    /// local model. Panics (via `unwrap`) if the project has no remote id.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
835
    /// Opens a buffer for a file URI handed to us by a language server. If
    /// the path lies outside all current worktrees, a new (invisible)
    /// worktree is created for it and the originating server is recorded as
    /// that worktree's server for `lang_name`.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // Not covered by any worktree: create a hidden one rooted at
                // the file itself (relative path is therefore empty).
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
872
    /// Saves `buffer` to a new absolute path, creating or reusing a local
    /// worktree for that path, then re-detects the buffer's language and
    /// re-registers it with the appropriate language server.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file (and possibly its extension) changed, so redo
            // language assignment and language-server registration.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
897
898 pub fn get_open_buffer(
899 &mut self,
900 path: &ProjectPath,
901 cx: &mut ModelContext<Self>,
902 ) -> Option<ModelHandle<Buffer>> {
903 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
904 self.opened_buffers.values().find_map(|buffer| {
905 let buffer = buffer.upgrade(cx)?;
906 let file = File::from_dyn(buffer.read(cx).file())?;
907 if file.worktree == worktree && file.path() == &path.path {
908 Some(buffer)
909 } else {
910 None
911 }
912 })
913 }
914
    /// Records a newly opened buffer in `opened_buffers`, applies any
    /// operations that arrived while it was loading, wires up event
    /// handling, and assigns its language / language server. Errors if a
    /// live buffer with the same remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Hold the buffer strongly only when guests may need it.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations received before the buffer finished opening are
            // applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle may be replaced; a live one is a duplicate.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        // Forward buffer events into `on_buffer_event`.
        cx.become_delegate(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
955
956 fn register_buffer_with_language_server(
957 &mut self,
958 buffer_handle: &ModelHandle<Buffer>,
959 cx: &mut ModelContext<Self>,
960 ) {
961 let buffer = buffer_handle.read(cx);
962 let buffer_id = buffer.remote_id();
963 if let Some(file) = File::from_dyn(buffer.file()) {
964 if file.is_local() {
965 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
966 let initial_snapshot = buffer.text_snapshot();
967 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
968
969 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
970 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
971 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
972 .log_err();
973 }
974 }
975
976 if let Some(server) = language_server {
977 server
978 .notify::<lsp::notification::DidOpenTextDocument>(
979 lsp::DidOpenTextDocumentParams {
980 text_document: lsp::TextDocumentItem::new(
981 uri,
982 Default::default(),
983 0,
984 initial_snapshot.text(),
985 ),
986 }
987 .clone(),
988 )
989 .log_err();
990 buffer_handle.update(cx, |buffer, cx| {
991 buffer.set_completion_triggers(
992 server
993 .capabilities()
994 .completion_provider
995 .as_ref()
996 .and_then(|provider| provider.trigger_characters.clone())
997 .unwrap_or(Vec::new()),
998 cx,
999 )
1000 });
1001 self.buffer_snapshots
1002 .insert(buffer_id, vec![(0, initial_snapshot)]);
1003 }
1004
1005 cx.observe_release(buffer_handle, |this, buffer, cx| {
1006 if let Some(file) = File::from_dyn(buffer.file()) {
1007 if file.is_local() {
1008 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1009 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1010 server
1011 .notify::<lsp::notification::DidCloseTextDocument>(
1012 lsp::DidCloseTextDocumentParams {
1013 text_document: lsp::TextDocumentIdentifier::new(
1014 uri.clone(),
1015 ),
1016 },
1017 )
1018 .log_err();
1019 }
1020 }
1021 }
1022 })
1023 .detach();
1024 }
1025 }
1026 }
1027
    /// Reacts to events from a registered buffer: forwards operations to
    /// collaborators, translates edits into incremental LSP `didChange`
    /// notifications, and broadcasts `didSave`. Returns `None` (harmlessly)
    /// whenever any prerequisite — remote id, language server, local file —
    /// is absent; the `?` operators below rely on that.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            // Replicate the operation to the server for other collaborators.
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            // Diff the buffer against the last snapshot the server saw and
            // send the edits as an incremental `didChange`.
            BufferEvent::Edited => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // Range is expressed in the *old* document: start at
                        // the edit's new start, spanning the old extent.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Remember the snapshot the server now has.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            // Tell every server for this worktree that the file was saved.
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1114
1115 fn language_servers_for_worktree(
1116 &self,
1117 worktree_id: WorktreeId,
1118 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1119 self.language_servers.iter().filter_map(
1120 move |((language_server_worktree_id, language_name), server)| {
1121 if *language_server_worktree_id == worktree_id {
1122 Some((language_name.as_ref(), server))
1123 } else {
1124 None
1125 }
1126 },
1127 )
1128 }
1129
1130 fn assign_language_to_buffer(
1131 &mut self,
1132 buffer: &ModelHandle<Buffer>,
1133 cx: &mut ModelContext<Self>,
1134 ) -> Option<()> {
1135 // If the buffer has a language, set it and start the language server if we haven't already.
1136 let full_path = buffer.read(cx).file()?.full_path(cx);
1137 let language = self.languages.select_language(&full_path)?;
1138 buffer.update(cx, |buffer, cx| {
1139 buffer.set_language(Some(language.clone()), cx);
1140 });
1141
1142 let file = File::from_dyn(buffer.read(cx).file())?;
1143 let worktree = file.worktree.read(cx).as_local()?;
1144 let worktree_id = worktree.id();
1145 let worktree_abs_path = worktree.abs_path().clone();
1146 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1147
1148 None
1149 }
1150
    /// Starts (at most once per `(worktree, language)` pair) the language
    /// server for `language` rooted at `worktree_path`, wires up its
    /// diagnostics notifications, and opens every already-open matching
    /// buffer in it.
    ///
    /// The started task is cached in `self.started_language_servers`; once the
    /// server finishes initializing it is also inserted into
    /// `self.language_servers`.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        // Internal event stream used to serialize diagnostics handling:
        // start/finish bracket a batch of updates.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let key = (worktree_id, language.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                let rpc = self.client.clone();
                // Weak spawn: if the project is dropped before the server
                // finishes starting, the `?` on `upgrade` aborts setup.
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                // Skip buffers from other worktrees/languages.
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Seed the snapshot history with version 0 so
                                // later `didChange` events have a base to
                                // diff against.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                // Adopt the server's completion trigger
                                // characters (e.g. "." in many languages).
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        Some(())
                    });

                    let disk_based_sources = language
                        .disk_based_diagnostic_sources()
                        .cloned()
                        .unwrap_or_default();
                    let disk_based_diagnostics_progress_token =
                        language.disk_based_diagnostics_progress_token().cloned();
                    let has_disk_based_diagnostic_progress_token =
                        disk_based_diagnostics_progress_token.is_some();
                    let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

                    // Listen for `PublishDiagnostics` notifications.
                    // Without a progress token, each publish is wrapped in its
                    // own synthetic start/finish pair.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let diagnostics_tx = diagnostics_tx.clone();
                            move |params| {
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                                block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
                                    .ok();
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                        })
                        .detach();

                    // Listen for `Progress` notifications. Send an event when the language server
                    // transitions between running jobs and not running any jobs.
                    let mut running_jobs_for_this_server: i32 = 0;
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::Number(_) => None,
                                lsp::NumberOrString::String(token) => Some(token),
                            };

                            // Only the language's designated disk-based
                            // diagnostics token is tracked here.
                            if token == disk_based_diagnostics_progress_token {
                                match params.value {
                                    lsp::ProgressParamsValue::WorkDone(progress) => {
                                        match progress {
                                            lsp::WorkDoneProgress::Begin(_) => {
                                                running_jobs_for_this_server += 1;
                                                if running_jobs_for_this_server == 1 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsStart),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            lsp::WorkDoneProgress::End(_) => {
                                                running_jobs_for_this_server -= 1;
                                                if running_jobs_for_this_server == 0 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsFinish),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            _ => {}
                                        }
                                    }
                                }
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    let this = this.downgrade();
                    cx.spawn(|mut cx| async move {
                        while let Ok(message) = diagnostics_rx.recv().await {
                            // Stop processing once the project is gone.
                            let this = this.upgrade(&cx)?;
                            match message {
                                LspEvent::DiagnosticsStart => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_started(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdating {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                                LspEvent::DiagnosticsUpdate(mut params) => {
                                    // Let the language massage raw server
                                    // output before it is applied.
                                    language.process_diagnostics(&mut params);
                                    this.update(&mut cx, |this, cx| {
                                        this.update_diagnostics(params, &disk_based_sources, cx)
                                            .log_err();
                                    });
                                }
                                LspEvent::DiagnosticsFinish => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdated {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                            }
                        }
                        Some(())
                    })
                    .detach();

                    Some(language_server)
                })
            });
    }
1352
1353 pub fn update_diagnostics(
1354 &mut self,
1355 params: lsp::PublishDiagnosticsParams,
1356 disk_based_sources: &HashSet<String>,
1357 cx: &mut ModelContext<Self>,
1358 ) -> Result<()> {
1359 let abs_path = params
1360 .uri
1361 .to_file_path()
1362 .map_err(|_| anyhow!("URI is not a file"))?;
1363 let mut next_group_id = 0;
1364 let mut diagnostics = Vec::default();
1365 let mut primary_diagnostic_group_ids = HashMap::default();
1366 let mut sources_by_group_id = HashMap::default();
1367 let mut supporting_diagnostic_severities = HashMap::default();
1368 for diagnostic in ¶ms.diagnostics {
1369 let source = diagnostic.source.as_ref();
1370 let code = diagnostic.code.as_ref().map(|code| match code {
1371 lsp::NumberOrString::Number(code) => code.to_string(),
1372 lsp::NumberOrString::String(code) => code.clone(),
1373 });
1374 let range = range_from_lsp(diagnostic.range);
1375 let is_supporting = diagnostic
1376 .related_information
1377 .as_ref()
1378 .map_or(false, |infos| {
1379 infos.iter().any(|info| {
1380 primary_diagnostic_group_ids.contains_key(&(
1381 source,
1382 code.clone(),
1383 range_from_lsp(info.location.range),
1384 ))
1385 })
1386 });
1387
1388 if is_supporting {
1389 if let Some(severity) = diagnostic.severity {
1390 supporting_diagnostic_severities
1391 .insert((source, code.clone(), range), severity);
1392 }
1393 } else {
1394 let group_id = post_inc(&mut next_group_id);
1395 let is_disk_based =
1396 source.map_or(false, |source| disk_based_sources.contains(source));
1397
1398 sources_by_group_id.insert(group_id, source);
1399 primary_diagnostic_group_ids
1400 .insert((source, code.clone(), range.clone()), group_id);
1401
1402 diagnostics.push(DiagnosticEntry {
1403 range,
1404 diagnostic: Diagnostic {
1405 code: code.clone(),
1406 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1407 message: diagnostic.message.clone(),
1408 group_id,
1409 is_primary: true,
1410 is_valid: true,
1411 is_disk_based,
1412 },
1413 });
1414 if let Some(infos) = &diagnostic.related_information {
1415 for info in infos {
1416 if info.location.uri == params.uri && !info.message.is_empty() {
1417 let range = range_from_lsp(info.location.range);
1418 diagnostics.push(DiagnosticEntry {
1419 range,
1420 diagnostic: Diagnostic {
1421 code: code.clone(),
1422 severity: DiagnosticSeverity::INFORMATION,
1423 message: info.message.clone(),
1424 group_id,
1425 is_primary: false,
1426 is_valid: true,
1427 is_disk_based,
1428 },
1429 });
1430 }
1431 }
1432 }
1433 }
1434 }
1435
1436 for entry in &mut diagnostics {
1437 let diagnostic = &mut entry.diagnostic;
1438 if !diagnostic.is_primary {
1439 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1440 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1441 source,
1442 diagnostic.code.clone(),
1443 entry.range.clone(),
1444 )) {
1445 diagnostic.severity = severity;
1446 }
1447 }
1448 }
1449
1450 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1451 Ok(())
1452 }
1453
    /// Applies a complete set of diagnostics for the file at `abs_path`:
    /// updates the matching open buffer (if any), stores the diagnostics on
    /// the owning local worktree, and emits `Event::DiagnosticsUpdated`.
    ///
    /// Diagnostics for non-visible worktrees are silently dropped.
    ///
    /// # Errors
    /// Fails when no local worktree contains `abs_path`, when the worktree is
    /// not local at update time, or when the buffer/worktree update fails.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Update the first open buffer whose file path matches.
        // NOTE(review): the match compares relative paths only (not worktree
        // ids) and stops at the first hit — two worktrees containing the same
        // relative path could resolve to the wrong buffer. Confirm intent.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        // Persist the diagnostics on the worktree so they survive buffer
        // closes and are visible in project-wide summaries.
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1494
    /// Installs `diagnostics` on `buffer`, remapping disk-based diagnostic
    /// ranges through any unsaved edits and clipping all ranges to valid
    /// buffer positions.
    ///
    /// `version` selects which historical buffer snapshot the diagnostics
    /// were computed against (see `buffer_snapshot_for_lsp_version`).
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics that share a range: primaries
        // first, then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, end descending (outer ranges first), so
        // the single forward scan over `edits_since_save` below is valid.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let mut edits_since_save = snapshot
            .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
            .peekable();
        // Running translation from saved-file coordinates to current-buffer
        // coordinates, advanced as edits are consumed.
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        'outer: for entry in diagnostics {
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry.diagnostic.is_disk_based {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        // Edit fully precedes the diagnostic: advance the
                        // coordinate translation and keep scanning.
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        // Edit overlaps the diagnostic: the range is stale,
                        // drop this diagnostic entirely.
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                // Re-anchor the range after the last consumed edit.
                let start_overshoot = start - last_edit_old_end;
                start = last_edit_new_end;
                start += start_overshoot;

                let end_overshoot = end - last_edit_old_end;
                end = last_edit_new_end;
                end += end_overshoot;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    // Couldn't expand rightward (end of line); expand left.
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1578
    /// Formats the given buffers, returning a transaction per buffer so the
    /// caller can undo/redo the formatting as a unit.
    ///
    /// Local buffers are formatted via their language server (whole-document
    /// formatting if supported, otherwise range formatting over the entire
    /// buffer, otherwise skipped). Remote buffers are formatted by the host
    /// through a `FormatBuffers` request. When `push_to_history` is false,
    /// the resulting transactions are removed from the buffers' undo stacks.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the buffers into local (formatted here via LSP) and
        // remote (delegated to the host).
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Buffers without files can't be formatted; return an empty
                // transaction set.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting requires a project id; `zip` drops the remote
        // buffers if this project isn't shared.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Ask the host to format the remote buffers and replay the
            // resulting transactions locally.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Prefer full-document formatting; fall back to formatting
                // the whole buffer as a range; skip if neither is supported.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    // Group all the formatting edits into one transaction so
                    // the whole format is a single undo step.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1693
1694 pub fn definition<T: ToPointUtf16>(
1695 &self,
1696 buffer: &ModelHandle<Buffer>,
1697 position: T,
1698 cx: &mut ModelContext<Self>,
1699 ) -> Task<Result<Vec<Location>>> {
1700 let position = position.to_point_utf16(buffer.read(cx));
1701 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1702 }
1703
1704 pub fn references<T: ToPointUtf16>(
1705 &self,
1706 buffer: &ModelHandle<Buffer>,
1707 position: T,
1708 cx: &mut ModelContext<Self>,
1709 ) -> Task<Result<Vec<Location>>> {
1710 let position = position.to_point_utf16(buffer.read(cx));
1711 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1712 }
1713
1714 pub fn document_highlights<T: ToPointUtf16>(
1715 &self,
1716 buffer: &ModelHandle<Buffer>,
1717 position: T,
1718 cx: &mut ModelContext<Self>,
1719 ) -> Task<Result<Vec<DocumentHighlight>>> {
1720 let position = position.to_point_utf16(buffer.read(cx));
1721
1722 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1723 }
1724
    /// Searches all workspaces for symbols matching `query`.
    ///
    /// Locally this fans a `workspace/symbol` request out to every distinct
    /// language server (deduplicated by server pointer, since one server may
    /// be registered under several keys) and maps the responses back to
    /// project paths. Remotely it forwards the query to the host via
    /// `GetProjectSymbols`.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers by pointer identity, remembering which
            // worktree/language each was registered for.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // Responses arrive in the same order the requests
                        // were issued, so zip them with the server metadata.
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer resolving the symbol into one of
                                    // our worktrees; otherwise keep it
                                    // relative to the server's root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host and deserialize its symbols.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            // Neither local nor shared: nothing to search.
            Task::ready(Ok(Default::default()))
        }
    }
1828
1829 pub fn open_buffer_for_symbol(
1830 &mut self,
1831 symbol: &Symbol,
1832 cx: &mut ModelContext<Self>,
1833 ) -> Task<Result<ModelHandle<Buffer>>> {
1834 if self.is_local() {
1835 let language_server = if let Some(server) = self.language_servers.get(&(
1836 symbol.source_worktree_id,
1837 Arc::from(symbol.language_name.as_str()),
1838 )) {
1839 server.clone()
1840 } else {
1841 return Task::ready(Err(anyhow!(
1842 "language server for worktree and language not found"
1843 )));
1844 };
1845
1846 let worktree_abs_path = if let Some(worktree_abs_path) = self
1847 .worktree_for_id(symbol.worktree_id, cx)
1848 .and_then(|worktree| worktree.read(cx).as_local())
1849 .map(|local_worktree| local_worktree.abs_path())
1850 {
1851 worktree_abs_path
1852 } else {
1853 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1854 };
1855 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1856 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1857 uri
1858 } else {
1859 return Task::ready(Err(anyhow!("invalid symbol path")));
1860 };
1861
1862 self.open_local_buffer_via_lsp(
1863 symbol_uri,
1864 Arc::from(symbol.language_name.as_str()),
1865 language_server,
1866 cx,
1867 )
1868 } else if let Some(project_id) = self.remote_id() {
1869 let request = self.client.request(proto::OpenBufferForSymbol {
1870 project_id,
1871 symbol: Some(serialize_symbol(symbol)),
1872 });
1873 cx.spawn(|this, mut cx| async move {
1874 let response = request.await?;
1875 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1876 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1877 .await
1878 })
1879 } else {
1880 Task::ready(Err(anyhow!("project does not have a remote id")))
1881 }
1882 }
1883
    /// Requests code completions at `position` in the given buffer.
    ///
    /// For local buffers this issues a `textDocument/completion` request to
    /// the buffer's language server and converts each text-edit completion
    /// into a `Completion` anchored in the buffer; insert/replace-style
    /// completions and completions whose edit range falls outside the buffer
    /// are dropped. For remote buffers the request is proxied to the host.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // No file means no language server or host to ask.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor used for the remote request so the host can resolve the
        // position even if the buffer changes in flight.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local worktrees always have an absolute path for their files.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Normalize the two LSP response shapes into a flat list.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Only keep completions whose edit range is valid
                            // in the current buffer contents.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote buffer: proxy the request to the host and wait for the
            // buffer to catch up to the host's version before deserializing.
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2008
    /// Resolves `completion` with the language server and applies any
    /// `additional_text_edits` it carries (e.g. auto-imports) to the buffer.
    ///
    /// Returns the transaction containing those edits, or `Ok(None)` when the
    /// resolved completion has no additional edits. When `push_to_history` is
    /// false, the transaction is removed from the buffer's undo stack. For
    /// remote projects the work is delegated to the host.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` fills in fields (like additional
                // edits) that the initial completion response may omit.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    // Bundle all additional edits into a single transaction.
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated locally
                    // before (optionally) pushing them onto the undo stack.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2089
2090 pub fn code_actions<T: ToOffset>(
2091 &self,
2092 buffer_handle: &ModelHandle<Buffer>,
2093 range: Range<T>,
2094 cx: &mut ModelContext<Self>,
2095 ) -> Task<Result<Vec<CodeAction>>> {
2096 let buffer_handle = buffer_handle.clone();
2097 let buffer = buffer_handle.read(cx);
2098 let buffer_id = buffer.remote_id();
2099 let worktree;
2100 let buffer_abs_path;
2101 if let Some(file) = File::from_dyn(buffer.file()) {
2102 worktree = file.worktree.clone();
2103 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2104 } else {
2105 return Task::ready(Ok(Default::default()));
2106 };
2107 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2108
2109 if worktree.read(cx).as_local().is_some() {
2110 let buffer_abs_path = buffer_abs_path.unwrap();
2111 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2112 server.clone()
2113 } else {
2114 return Task::ready(Ok(Default::default()));
2115 };
2116
2117 let lsp_range = lsp::Range::new(
2118 range.start.to_point_utf16(buffer).to_lsp_position(),
2119 range.end.to_point_utf16(buffer).to_lsp_position(),
2120 );
2121 cx.foreground().spawn(async move {
2122 if !lang_server.capabilities().code_action_provider.is_some() {
2123 return Ok(Default::default());
2124 }
2125
2126 Ok(lang_server
2127 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2128 text_document: lsp::TextDocumentIdentifier::new(
2129 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2130 ),
2131 range: lsp_range,
2132 work_done_progress_params: Default::default(),
2133 partial_result_params: Default::default(),
2134 context: lsp::CodeActionContext {
2135 diagnostics: Default::default(),
2136 only: Some(vec![
2137 lsp::CodeActionKind::QUICKFIX,
2138 lsp::CodeActionKind::REFACTOR,
2139 lsp::CodeActionKind::REFACTOR_EXTRACT,
2140 ]),
2141 },
2142 })
2143 .await?
2144 .unwrap_or_default()
2145 .into_iter()
2146 .filter_map(|entry| {
2147 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2148 Some(CodeAction {
2149 range: range.clone(),
2150 lsp_action,
2151 })
2152 } else {
2153 None
2154 }
2155 })
2156 .collect())
2157 })
2158 } else if let Some(project_id) = self.remote_id() {
2159 let rpc = self.client.clone();
2160 let version = buffer.version();
2161 cx.spawn_weak(|_, mut cx| async move {
2162 let response = rpc
2163 .request(proto::GetCodeActions {
2164 project_id,
2165 buffer_id,
2166 start: Some(language::proto::serialize_anchor(&range.start)),
2167 end: Some(language::proto::serialize_anchor(&range.end)),
2168 version: serialize_version(&version),
2169 })
2170 .await?;
2171
2172 buffer_handle
2173 .update(&mut cx, |buffer, _| {
2174 buffer.wait_for_version(deserialize_version(response.version))
2175 })
2176 .await;
2177
2178 response
2179 .actions
2180 .into_iter()
2181 .map(language::proto::deserialize_code_action)
2182 .collect()
2183 })
2184 } else {
2185 Task::ready(Ok(Default::default()))
2186 }
2187 }
2188
    /// Applies `action` to the project, returning the transaction produced in
    /// each affected buffer.
    ///
    /// Locally the action is resolved with the language server (or re-fetched
    /// when it carries no resolve data) and its workspace edit is applied; on
    /// a remote project the request is forwarded to the host over RPC.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            // Without a language or a running server there is nothing to apply.
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports lazy resolution: refresh the range
                    // embedded in its resolve data, then ask the server to
                    // fill in the actual edit.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for this range
                    // and pick the one whose title matches.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    // An action with no edit is a no-op from our perspective.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2275
2276 async fn deserialize_workspace_edit(
2277 this: ModelHandle<Self>,
2278 edit: lsp::WorkspaceEdit,
2279 push_to_history: bool,
2280 language_name: Arc<str>,
2281 language_server: Arc<LanguageServer>,
2282 cx: &mut AsyncAppContext,
2283 ) -> Result<ProjectTransaction> {
2284 let fs = this.read_with(cx, |this, _| this.fs.clone());
2285 let mut operations = Vec::new();
2286 if let Some(document_changes) = edit.document_changes {
2287 match document_changes {
2288 lsp::DocumentChanges::Edits(edits) => {
2289 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2290 }
2291 lsp::DocumentChanges::Operations(ops) => operations = ops,
2292 }
2293 } else if let Some(changes) = edit.changes {
2294 operations.extend(changes.into_iter().map(|(uri, edits)| {
2295 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2296 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2297 uri,
2298 version: None,
2299 },
2300 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2301 })
2302 }));
2303 }
2304
2305 let mut project_transaction = ProjectTransaction::default();
2306 for operation in operations {
2307 match operation {
2308 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2309 let abs_path = op
2310 .uri
2311 .to_file_path()
2312 .map_err(|_| anyhow!("can't convert URI to path"))?;
2313
2314 if let Some(parent_path) = abs_path.parent() {
2315 fs.create_dir(parent_path).await?;
2316 }
2317 if abs_path.ends_with("/") {
2318 fs.create_dir(&abs_path).await?;
2319 } else {
2320 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2321 .await?;
2322 }
2323 }
2324 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2325 let source_abs_path = op
2326 .old_uri
2327 .to_file_path()
2328 .map_err(|_| anyhow!("can't convert URI to path"))?;
2329 let target_abs_path = op
2330 .new_uri
2331 .to_file_path()
2332 .map_err(|_| anyhow!("can't convert URI to path"))?;
2333 fs.rename(
2334 &source_abs_path,
2335 &target_abs_path,
2336 op.options.map(Into::into).unwrap_or_default(),
2337 )
2338 .await?;
2339 }
2340 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2341 let abs_path = op
2342 .uri
2343 .to_file_path()
2344 .map_err(|_| anyhow!("can't convert URI to path"))?;
2345 let options = op.options.map(Into::into).unwrap_or_default();
2346 if abs_path.ends_with("/") {
2347 fs.remove_dir(&abs_path, options).await?;
2348 } else {
2349 fs.remove_file(&abs_path, options).await?;
2350 }
2351 }
2352 lsp::DocumentChangeOperation::Edit(op) => {
2353 let buffer_to_edit = this
2354 .update(cx, |this, cx| {
2355 this.open_local_buffer_via_lsp(
2356 op.text_document.uri,
2357 language_name.clone(),
2358 language_server.clone(),
2359 cx,
2360 )
2361 })
2362 .await?;
2363
2364 let edits = this
2365 .update(cx, |this, cx| {
2366 let edits = op.edits.into_iter().map(|edit| match edit {
2367 lsp::OneOf::Left(edit) => edit,
2368 lsp::OneOf::Right(edit) => edit.text_edit,
2369 });
2370 this.edits_from_lsp(
2371 &buffer_to_edit,
2372 edits,
2373 op.text_document.version,
2374 cx,
2375 )
2376 })
2377 .await?;
2378
2379 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2380 buffer.finalize_last_transaction();
2381 buffer.start_transaction();
2382 for (range, text) in edits {
2383 buffer.edit([range], text, cx);
2384 }
2385 let transaction = if buffer.end_transaction(cx).is_some() {
2386 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2387 if !push_to_history {
2388 buffer.forget_transaction(transaction.id);
2389 }
2390 Some(transaction)
2391 } else {
2392 None
2393 };
2394
2395 transaction
2396 });
2397 if let Some(transaction) = transaction {
2398 project_transaction.0.insert(buffer_to_edit, transaction);
2399 }
2400 }
2401 }
2402 }
2403
2404 Ok(project_transaction)
2405 }
2406
2407 pub fn prepare_rename<T: ToPointUtf16>(
2408 &self,
2409 buffer: ModelHandle<Buffer>,
2410 position: T,
2411 cx: &mut ModelContext<Self>,
2412 ) -> Task<Result<Option<Range<Anchor>>>> {
2413 let position = position.to_point_utf16(buffer.read(cx));
2414 self.request_lsp(buffer, PrepareRename { position }, cx)
2415 }
2416
2417 pub fn perform_rename<T: ToPointUtf16>(
2418 &self,
2419 buffer: ModelHandle<Buffer>,
2420 position: T,
2421 new_name: String,
2422 push_to_history: bool,
2423 cx: &mut ModelContext<Self>,
2424 ) -> Task<Result<ProjectTransaction>> {
2425 let position = position.to_point_utf16(buffer.read(cx));
2426 self.request_lsp(
2427 buffer,
2428 PerformRename {
2429 position,
2430 new_name,
2431 push_to_history,
2432 },
2433 cx,
2434 )
2435 }
2436
    /// Searches all visible worktrees for `query`, returning the matching
    /// ranges grouped by buffer.
    ///
    /// Locally this runs as a three-stage pipeline: worker tasks scan files on
    /// disk for candidate paths, a foreground task opens buffers for those
    /// paths (already-open buffers are searched too), and background workers
    /// run the query against each buffer snapshot. Remote projects forward
    /// the query to the host over RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            // Snapshot every visible local worktree so scanning can proceed
            // on background threads without touching app state.
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: split the path space evenly across workers; each worker
            // scans its slice of files on disk and emits candidate paths.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        // Ceiling division so all paths are covered.
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        // Reused across iterations to avoid
                                        // reallocating per file.
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // This worker's slice overlaps
                                                // this snapshot.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: stop
                                                    // scanning early.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed already-open buffers directly, then open a buffer
            // for each candidate path coming from the scanners.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                // Tracks which buffers were already sent so a candidate path
                // that resolves to an open buffer isn't searched twice.
                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: workers drain the buffer channel, run the query against
            // each snapshot, and record anchored match ranges.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                // One result map per worker, merged at the end (no locking).
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            // Neither local nor connected: nothing to search.
            Task::ready(Ok(Default::default()))
        }
    }
2639
    /// Issues a typed LSP request for `buffer_handle`, either against the
    /// buffer's local language server or, on remote projects, via RPC to the
    /// host.
    ///
    /// Returns a default response when the project is disconnected, the
    /// buffer has no language server, or the server lacks the capability the
    /// request needs.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Skip servers that don't support this request.
                    if !request.check_capabilities(language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        // Local buffer without a server, or project not connected.
        Task::ready(Ok(Default::default()))
    }
2682
2683 pub fn find_or_create_local_worktree(
2684 &mut self,
2685 abs_path: impl AsRef<Path>,
2686 visible: bool,
2687 cx: &mut ModelContext<Self>,
2688 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2689 let abs_path = abs_path.as_ref();
2690 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2691 Task::ready(Ok((tree.clone(), relative_path.into())))
2692 } else {
2693 let worktree = self.create_local_worktree(abs_path, visible, cx);
2694 cx.foreground()
2695 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2696 }
2697 }
2698
2699 pub fn find_local_worktree(
2700 &self,
2701 abs_path: &Path,
2702 cx: &AppContext,
2703 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2704 for tree in self.worktrees(cx) {
2705 if let Some(relative_path) = tree
2706 .read(cx)
2707 .as_local()
2708 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2709 {
2710 return Some((tree.clone(), relative_path.into()));
2711 }
2712 }
2713 None
2714 }
2715
2716 pub fn is_shared(&self) -> bool {
2717 match &self.client_state {
2718 ProjectClientState::Local { is_shared, .. } => *is_shared,
2719 ProjectClientState::Remote { .. } => false,
2720 }
2721 }
2722
2723 fn create_local_worktree(
2724 &mut self,
2725 abs_path: impl AsRef<Path>,
2726 visible: bool,
2727 cx: &mut ModelContext<Self>,
2728 ) -> Task<Result<ModelHandle<Worktree>>> {
2729 let fs = self.fs.clone();
2730 let client = self.client.clone();
2731 let path: Arc<Path> = abs_path.as_ref().into();
2732 let task = self
2733 .loading_local_worktrees
2734 .entry(path.clone())
2735 .or_insert_with(|| {
2736 cx.spawn(|project, mut cx| {
2737 async move {
2738 let worktree =
2739 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2740 .await;
2741 project.update(&mut cx, |project, _| {
2742 project.loading_local_worktrees.remove(&path);
2743 });
2744 let worktree = worktree?;
2745
2746 let (remote_project_id, is_shared) =
2747 project.update(&mut cx, |project, cx| {
2748 project.add_worktree(&worktree, cx);
2749 (project.remote_id(), project.is_shared())
2750 });
2751
2752 if let Some(project_id) = remote_project_id {
2753 if is_shared {
2754 worktree
2755 .update(&mut cx, |worktree, cx| {
2756 worktree.as_local_mut().unwrap().share(project_id, cx)
2757 })
2758 .await?;
2759 } else {
2760 worktree
2761 .update(&mut cx, |worktree, cx| {
2762 worktree.as_local_mut().unwrap().register(project_id, cx)
2763 })
2764 .await?;
2765 }
2766 }
2767
2768 Ok(worktree)
2769 }
2770 .map_err(|err| Arc::new(err))
2771 })
2772 .shared()
2773 })
2774 .clone();
2775 cx.foreground().spawn(async move {
2776 match task.await {
2777 Ok(worktree) => Ok(worktree),
2778 Err(err) => Err(anyhow!("{}", err)),
2779 }
2780 })
2781 }
2782
2783 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2784 self.worktrees.retain(|worktree| {
2785 worktree
2786 .upgrade(cx)
2787 .map_or(false, |w| w.read(cx).id() != id)
2788 });
2789 cx.notify();
2790 }
2791
2792 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2793 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2794 if worktree.read(cx).is_local() {
2795 cx.subscribe(&worktree, |this, worktree, _, cx| {
2796 this.update_local_worktree_buffers(worktree, cx);
2797 })
2798 .detach();
2799 }
2800
2801 let push_strong_handle = {
2802 let worktree = worktree.read(cx);
2803 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2804 };
2805 if push_strong_handle {
2806 self.worktrees
2807 .push(WorktreeHandle::Strong(worktree.clone()));
2808 } else {
2809 cx.observe_release(&worktree, |this, _, cx| {
2810 this.worktrees
2811 .retain(|worktree| worktree.upgrade(cx).is_some());
2812 cx.notify();
2813 })
2814 .detach();
2815 self.worktrees
2816 .push(WorktreeHandle::Weak(worktree.downgrade()));
2817 }
2818 cx.notify();
2819 }
2820
    /// Reconciles every open buffer's `File` with the latest snapshot of
    /// `worktree_handle` after a local worktree change, notifying remote
    /// collaborators of file metadata updates.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only touch buffers belonging to this worktree.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolve the entry by id first (tracks renames), then
                        // by path; if neither exists the entry was deleted and
                        // the file keeps its old metadata with no entry id.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Best-effort: tell collaborators about the new file
                        // metadata when connected.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // Buffer was dropped; can't mutate the map while iterating,
                // so collect and remove below.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }
    }
2888
2889 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2890 let new_active_entry = entry.and_then(|project_path| {
2891 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2892 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2893 Some(ProjectEntry {
2894 worktree_id: project_path.worktree_id,
2895 entry_id: entry.id,
2896 })
2897 });
2898 if new_active_entry != self.active_entry {
2899 self.active_entry = new_active_entry;
2900 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2901 }
2902 }
2903
    /// Whether any language server is currently producing disk-based
    /// diagnostics (e.g. a full-project check).
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
2907
2908 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2909 let mut summary = DiagnosticSummary::default();
2910 for (_, path_summary) in self.diagnostic_summaries(cx) {
2911 summary.error_count += path_summary.error_count;
2912 summary.warning_count += path_summary.warning_count;
2913 summary.info_count += path_summary.info_count;
2914 summary.hint_count += path_summary.hint_count;
2915 }
2916 summary
2917 }
2918
2919 pub fn diagnostic_summaries<'a>(
2920 &'a self,
2921 cx: &'a AppContext,
2922 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2923 self.worktrees(cx).flat_map(move |worktree| {
2924 let worktree = worktree.read(cx);
2925 let worktree_id = worktree.id();
2926 worktree
2927 .diagnostic_summaries()
2928 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2929 })
2930 }
2931
2932 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2933 self.language_servers_with_diagnostics_running += 1;
2934 if self.language_servers_with_diagnostics_running == 1 {
2935 cx.emit(Event::DiskBasedDiagnosticsStarted);
2936 }
2937 }
2938
2939 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2940 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2941 self.language_servers_with_diagnostics_running -= 1;
2942 if self.language_servers_with_diagnostics_running == 0 {
2943 cx.emit(Event::DiskBasedDiagnosticsFinished);
2944 }
2945 }
2946
    /// The currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2950
2951 // RPC message handlers
2952
2953 async fn handle_unshare_project(
2954 this: ModelHandle<Self>,
2955 _: TypedEnvelope<proto::UnshareProject>,
2956 _: Arc<Client>,
2957 mut cx: AsyncAppContext,
2958 ) -> Result<()> {
2959 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2960 Ok(())
2961 }
2962
2963 async fn handle_add_collaborator(
2964 this: ModelHandle<Self>,
2965 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2966 _: Arc<Client>,
2967 mut cx: AsyncAppContext,
2968 ) -> Result<()> {
2969 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2970 let collaborator = envelope
2971 .payload
2972 .collaborator
2973 .take()
2974 .ok_or_else(|| anyhow!("empty collaborator"))?;
2975
2976 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2977 this.update(&mut cx, |this, cx| {
2978 this.collaborators
2979 .insert(collaborator.peer_id, collaborator);
2980 cx.notify();
2981 });
2982
2983 Ok(())
2984 }
2985
2986 async fn handle_remove_collaborator(
2987 this: ModelHandle<Self>,
2988 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2989 _: Arc<Client>,
2990 mut cx: AsyncAppContext,
2991 ) -> Result<()> {
2992 this.update(&mut cx, |this, cx| {
2993 let peer_id = PeerId(envelope.payload.peer_id);
2994 let replica_id = this
2995 .collaborators
2996 .remove(&peer_id)
2997 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2998 .replica_id;
2999 for (_, buffer) in &this.opened_buffers {
3000 if let Some(buffer) = buffer.upgrade(cx) {
3001 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3002 }
3003 }
3004 cx.notify();
3005 Ok(())
3006 })
3007 }
3008
3009 async fn handle_register_worktree(
3010 this: ModelHandle<Self>,
3011 envelope: TypedEnvelope<proto::RegisterWorktree>,
3012 client: Arc<Client>,
3013 mut cx: AsyncAppContext,
3014 ) -> Result<()> {
3015 this.update(&mut cx, |this, cx| {
3016 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3017 let replica_id = this.replica_id();
3018 let worktree = proto::Worktree {
3019 id: envelope.payload.worktree_id,
3020 root_name: envelope.payload.root_name,
3021 entries: Default::default(),
3022 diagnostic_summaries: Default::default(),
3023 visible: envelope.payload.visible,
3024 };
3025 let (worktree, load_task) =
3026 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3027 this.add_worktree(&worktree, cx);
3028 load_task.detach();
3029 Ok(())
3030 })
3031 }
3032
3033 async fn handle_unregister_worktree(
3034 this: ModelHandle<Self>,
3035 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3036 _: Arc<Client>,
3037 mut cx: AsyncAppContext,
3038 ) -> Result<()> {
3039 this.update(&mut cx, |this, cx| {
3040 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3041 this.remove_worktree(worktree_id, cx);
3042 Ok(())
3043 })
3044 }
3045
3046 async fn handle_update_worktree(
3047 this: ModelHandle<Self>,
3048 envelope: TypedEnvelope<proto::UpdateWorktree>,
3049 _: Arc<Client>,
3050 mut cx: AsyncAppContext,
3051 ) -> Result<()> {
3052 this.update(&mut cx, |this, cx| {
3053 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3054 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3055 worktree.update(cx, |worktree, _| {
3056 let worktree = worktree.as_remote_mut().unwrap();
3057 worktree.update_from_remote(envelope)
3058 })?;
3059 }
3060 Ok(())
3061 })
3062 }
3063
3064 async fn handle_update_diagnostic_summary(
3065 this: ModelHandle<Self>,
3066 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3067 _: Arc<Client>,
3068 mut cx: AsyncAppContext,
3069 ) -> Result<()> {
3070 this.update(&mut cx, |this, cx| {
3071 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3072 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3073 if let Some(summary) = envelope.payload.summary {
3074 let project_path = ProjectPath {
3075 worktree_id,
3076 path: Path::new(&summary.path).into(),
3077 };
3078 worktree.update(cx, |worktree, _| {
3079 worktree
3080 .as_remote_mut()
3081 .unwrap()
3082 .update_diagnostic_summary(project_path.path.clone(), &summary);
3083 });
3084 cx.emit(Event::DiagnosticsUpdated(project_path));
3085 }
3086 }
3087 Ok(())
3088 })
3089 }
3090
3091 async fn handle_disk_based_diagnostics_updating(
3092 this: ModelHandle<Self>,
3093 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
3094 _: Arc<Client>,
3095 mut cx: AsyncAppContext,
3096 ) -> Result<()> {
3097 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
3098 Ok(())
3099 }
3100
3101 async fn handle_disk_based_diagnostics_updated(
3102 this: ModelHandle<Self>,
3103 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
3104 _: Arc<Client>,
3105 mut cx: AsyncAppContext,
3106 ) -> Result<()> {
3107 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3108 Ok(())
3109 }
3110
3111 async fn handle_update_buffer(
3112 this: ModelHandle<Self>,
3113 envelope: TypedEnvelope<proto::UpdateBuffer>,
3114 _: Arc<Client>,
3115 mut cx: AsyncAppContext,
3116 ) -> Result<()> {
3117 this.update(&mut cx, |this, cx| {
3118 let payload = envelope.payload.clone();
3119 let buffer_id = payload.buffer_id;
3120 let ops = payload
3121 .operations
3122 .into_iter()
3123 .map(|op| language::proto::deserialize_operation(op))
3124 .collect::<Result<Vec<_>, _>>()?;
3125 match this.opened_buffers.entry(buffer_id) {
3126 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3127 OpenBuffer::Strong(buffer) => {
3128 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3129 }
3130 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3131 OpenBuffer::Weak(_) => {}
3132 },
3133 hash_map::Entry::Vacant(e) => {
3134 e.insert(OpenBuffer::Loading(ops));
3135 }
3136 }
3137 Ok(())
3138 })
3139 }
3140
3141 async fn handle_update_buffer_file(
3142 this: ModelHandle<Self>,
3143 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3144 _: Arc<Client>,
3145 mut cx: AsyncAppContext,
3146 ) -> Result<()> {
3147 this.update(&mut cx, |this, cx| {
3148 let payload = envelope.payload.clone();
3149 let buffer_id = payload.buffer_id;
3150 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3151 let worktree = this
3152 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3153 .ok_or_else(|| anyhow!("no such worktree"))?;
3154 let file = File::from_proto(file, worktree.clone(), cx)?;
3155 let buffer = this
3156 .opened_buffers
3157 .get_mut(&buffer_id)
3158 .and_then(|b| b.upgrade(cx))
3159 .ok_or_else(|| anyhow!("no such buffer"))?;
3160 buffer.update(cx, |buffer, cx| {
3161 buffer.file_updated(Box::new(file), cx).detach();
3162 });
3163 Ok(())
3164 })
3165 }
3166
    /// RPC handler: saves a buffer on behalf of a peer. Waits until this
    /// replica has caught up to the peer's observed version before saving,
    /// then reports the saved version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                // NOTE(review): assumes any present `opened_buffers` entry can
                // be upgraded here; a dropped buffer would panic — confirm.
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all edits up to the requested version have been
        // applied on this replica.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
        })
    }
3199
    /// RPC handler: formats a set of buffers on behalf of a peer and returns
    /// the resulting project transaction, serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        // NOTE(review): assumes any present `opened_buffers`
                        // entry can be upgraded; a dropped buffer would panic
                        // — confirm.
                        .map(|buffer| buffer.upgrade(cx).unwrap())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        // Serialize the transaction relative to the requesting peer so its
        // buffer ids line up on their side.
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
3228
3229 async fn handle_get_completions(
3230 this: ModelHandle<Self>,
3231 envelope: TypedEnvelope<proto::GetCompletions>,
3232 _: Arc<Client>,
3233 mut cx: AsyncAppContext,
3234 ) -> Result<proto::GetCompletionsResponse> {
3235 let position = envelope
3236 .payload
3237 .position
3238 .and_then(language::proto::deserialize_anchor)
3239 .ok_or_else(|| anyhow!("invalid position"))?;
3240 let version = deserialize_version(envelope.payload.version);
3241 let buffer = this.read_with(&cx, |this, cx| {
3242 this.opened_buffers
3243 .get(&envelope.payload.buffer_id)
3244 .map(|buffer| buffer.upgrade(cx).unwrap())
3245 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3246 })?;
3247 buffer
3248 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3249 .await;
3250 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3251 let completions = this
3252 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3253 .await?;
3254
3255 Ok(proto::GetCompletionsResponse {
3256 completions: completions
3257 .iter()
3258 .map(language::proto::serialize_completion)
3259 .collect(),
3260 version: serialize_version(&version),
3261 })
3262 }
3263
3264 async fn handle_apply_additional_edits_for_completion(
3265 this: ModelHandle<Self>,
3266 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3267 _: Arc<Client>,
3268 mut cx: AsyncAppContext,
3269 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3270 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3271 let buffer = this
3272 .opened_buffers
3273 .get(&envelope.payload.buffer_id)
3274 .map(|buffer| buffer.upgrade(cx).unwrap())
3275 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3276 let language = buffer.read(cx).language();
3277 let completion = language::proto::deserialize_completion(
3278 envelope
3279 .payload
3280 .completion
3281 .ok_or_else(|| anyhow!("invalid completion"))?,
3282 language,
3283 )?;
3284 Ok::<_, anyhow::Error>(
3285 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3286 )
3287 })?;
3288
3289 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3290 transaction: apply_additional_edits
3291 .await?
3292 .as_ref()
3293 .map(language::proto::serialize_transaction),
3294 })
3295 }
3296
3297 async fn handle_get_code_actions(
3298 this: ModelHandle<Self>,
3299 envelope: TypedEnvelope<proto::GetCodeActions>,
3300 _: Arc<Client>,
3301 mut cx: AsyncAppContext,
3302 ) -> Result<proto::GetCodeActionsResponse> {
3303 let start = envelope
3304 .payload
3305 .start
3306 .and_then(language::proto::deserialize_anchor)
3307 .ok_or_else(|| anyhow!("invalid start"))?;
3308 let end = envelope
3309 .payload
3310 .end
3311 .and_then(language::proto::deserialize_anchor)
3312 .ok_or_else(|| anyhow!("invalid end"))?;
3313 let buffer = this.update(&mut cx, |this, cx| {
3314 this.opened_buffers
3315 .get(&envelope.payload.buffer_id)
3316 .map(|buffer| buffer.upgrade(cx).unwrap())
3317 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3318 })?;
3319 buffer
3320 .update(&mut cx, |buffer, _| {
3321 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3322 })
3323 .await;
3324
3325 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3326 let code_actions = this.update(&mut cx, |this, cx| {
3327 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3328 })?;
3329
3330 Ok(proto::GetCodeActionsResponse {
3331 actions: code_actions
3332 .await?
3333 .iter()
3334 .map(language::proto::serialize_code_action)
3335 .collect(),
3336 version: serialize_version(&version),
3337 })
3338 }
3339
3340 async fn handle_apply_code_action(
3341 this: ModelHandle<Self>,
3342 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3343 _: Arc<Client>,
3344 mut cx: AsyncAppContext,
3345 ) -> Result<proto::ApplyCodeActionResponse> {
3346 let sender_id = envelope.original_sender_id()?;
3347 let action = language::proto::deserialize_code_action(
3348 envelope
3349 .payload
3350 .action
3351 .ok_or_else(|| anyhow!("invalid action"))?,
3352 )?;
3353 let apply_code_action = this.update(&mut cx, |this, cx| {
3354 let buffer = this
3355 .opened_buffers
3356 .get(&envelope.payload.buffer_id)
3357 .map(|buffer| buffer.upgrade(cx).unwrap())
3358 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3359 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3360 })?;
3361
3362 let project_transaction = apply_code_action.await?;
3363 let project_transaction = this.update(&mut cx, |this, cx| {
3364 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3365 });
3366 Ok(proto::ApplyCodeActionResponse {
3367 transaction: Some(project_transaction),
3368 })
3369 }
3370
3371 async fn handle_lsp_command<T: LspCommand>(
3372 this: ModelHandle<Self>,
3373 envelope: TypedEnvelope<T::ProtoRequest>,
3374 _: Arc<Client>,
3375 mut cx: AsyncAppContext,
3376 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3377 where
3378 <T::LspRequest as lsp::request::Request>::Result: Send,
3379 {
3380 let sender_id = envelope.original_sender_id()?;
3381 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3382 let buffer_handle = this.read_with(&cx, |this, _| {
3383 this.opened_buffers
3384 .get(&buffer_id)
3385 .map(|buffer| buffer.upgrade(&cx).unwrap())
3386 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3387 })?;
3388 let request = T::from_proto(
3389 envelope.payload,
3390 this.clone(),
3391 buffer_handle.clone(),
3392 cx.clone(),
3393 )
3394 .await?;
3395 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3396 let response = this
3397 .update(&mut cx, |this, cx| {
3398 this.request_lsp(buffer_handle, request, cx)
3399 })
3400 .await?;
3401 this.update(&mut cx, |this, cx| {
3402 Ok(T::response_to_proto(
3403 response,
3404 this,
3405 sender_id,
3406 &buffer_version,
3407 cx,
3408 ))
3409 })
3410 }
3411
3412 async fn handle_get_project_symbols(
3413 this: ModelHandle<Self>,
3414 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3415 _: Arc<Client>,
3416 mut cx: AsyncAppContext,
3417 ) -> Result<proto::GetProjectSymbolsResponse> {
3418 let symbols = this
3419 .update(&mut cx, |this, cx| {
3420 this.symbols(&envelope.payload.query, cx)
3421 })
3422 .await?;
3423
3424 Ok(proto::GetProjectSymbolsResponse {
3425 symbols: symbols.iter().map(serialize_symbol).collect(),
3426 })
3427 }
3428
3429 async fn handle_search_project(
3430 this: ModelHandle<Self>,
3431 envelope: TypedEnvelope<proto::SearchProject>,
3432 _: Arc<Client>,
3433 mut cx: AsyncAppContext,
3434 ) -> Result<proto::SearchProjectResponse> {
3435 let peer_id = envelope.original_sender_id()?;
3436 let query = SearchQuery::from_proto(envelope.payload)?;
3437 let result = this
3438 .update(&mut cx, |this, cx| this.search(query, cx))
3439 .await?;
3440
3441 this.update(&mut cx, |this, cx| {
3442 let mut locations = Vec::new();
3443 for (buffer, ranges) in result {
3444 for range in ranges {
3445 let start = serialize_anchor(&range.start);
3446 let end = serialize_anchor(&range.end);
3447 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3448 locations.push(proto::Location {
3449 buffer: Some(buffer),
3450 start: Some(start),
3451 end: Some(end),
3452 });
3453 }
3454 }
3455 Ok(proto::SearchProjectResponse { locations })
3456 })
3457 }
3458
3459 async fn handle_open_buffer_for_symbol(
3460 this: ModelHandle<Self>,
3461 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3462 _: Arc<Client>,
3463 mut cx: AsyncAppContext,
3464 ) -> Result<proto::OpenBufferForSymbolResponse> {
3465 let peer_id = envelope.original_sender_id()?;
3466 let symbol = envelope
3467 .payload
3468 .symbol
3469 .ok_or_else(|| anyhow!("invalid symbol"))?;
3470 let symbol = this.read_with(&cx, |this, _| {
3471 let symbol = this.deserialize_symbol(symbol)?;
3472 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3473 if signature == symbol.signature {
3474 Ok(symbol)
3475 } else {
3476 Err(anyhow!("invalid symbol signature"))
3477 }
3478 })?;
3479 let buffer = this
3480 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3481 .await?;
3482
3483 Ok(proto::OpenBufferForSymbolResponse {
3484 buffer: Some(this.update(&mut cx, |this, cx| {
3485 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3486 })),
3487 })
3488 }
3489
    /// Computes a digest over (worktree id, path, per-project secret nonce).
    ///
    /// Symbols sent to guests carry this signature; when a guest sends a
    /// symbol back, the host recomputes it to verify the symbol originated
    /// here (see `handle_open_buffer_for_symbol`).
    fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(worktree_id.to_proto().to_be_bytes());
        hasher.update(path.to_string_lossy().as_bytes());
        hasher.update(self.nonce.to_be_bytes());
        // A Sha256 digest is exactly 32 bytes, so this conversion can't fail.
        hasher.finalize().as_slice().try_into().unwrap()
    }
3497
3498 async fn handle_open_buffer(
3499 this: ModelHandle<Self>,
3500 envelope: TypedEnvelope<proto::OpenBuffer>,
3501 _: Arc<Client>,
3502 mut cx: AsyncAppContext,
3503 ) -> Result<proto::OpenBufferResponse> {
3504 let peer_id = envelope.original_sender_id()?;
3505 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3506 let open_buffer = this.update(&mut cx, |this, cx| {
3507 this.open_buffer(
3508 ProjectPath {
3509 worktree_id,
3510 path: PathBuf::from(envelope.payload.path).into(),
3511 },
3512 cx,
3513 )
3514 });
3515
3516 let buffer = open_buffer.await?;
3517 this.update(&mut cx, |this, cx| {
3518 Ok(proto::OpenBufferResponse {
3519 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3520 })
3521 })
3522 }
3523
3524 fn serialize_project_transaction_for_peer(
3525 &mut self,
3526 project_transaction: ProjectTransaction,
3527 peer_id: PeerId,
3528 cx: &AppContext,
3529 ) -> proto::ProjectTransaction {
3530 let mut serialized_transaction = proto::ProjectTransaction {
3531 buffers: Default::default(),
3532 transactions: Default::default(),
3533 };
3534 for (buffer, transaction) in project_transaction.0 {
3535 serialized_transaction
3536 .buffers
3537 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3538 serialized_transaction
3539 .transactions
3540 .push(language::proto::serialize_transaction(&transaction));
3541 }
3542 serialized_transaction
3543 }
3544
    /// Reconstructs a `ProjectTransaction` from its wire representation.
    ///
    /// First resolves every buffer (which may wait for a buffer that is
    /// still being opened), then waits for each transaction's edits to have
    /// been applied locally before optionally pushing the transaction onto
    /// the buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays in the proto
            // message; zip pairs them back up.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The transaction's edits may not have reached this replica
                // yet; wait until they have so the transaction is coherent.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
3578
3579 fn serialize_buffer_for_peer(
3580 &mut self,
3581 buffer: &ModelHandle<Buffer>,
3582 peer_id: PeerId,
3583 cx: &AppContext,
3584 ) -> proto::Buffer {
3585 let buffer_id = buffer.read(cx).remote_id();
3586 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3587 if shared_buffers.insert(buffer_id) {
3588 proto::Buffer {
3589 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3590 }
3591 } else {
3592 proto::Buffer {
3593 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3594 }
3595 }
3596 }
3597
    /// Resolves a `proto::Buffer` into a live buffer handle.
    ///
    /// An `Id` variant refers to a buffer this replica should already know
    /// about (or is about to learn about), so this waits until the buffer
    /// appears in `opened_buffers`. A `State` variant carries the full
    /// buffer contents and constructs a brand-new local replica.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Watch channel used to signal (and observe) newly opened buffers.
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer may still be in flight; poll `opened_buffers`
                    // and block on the watch channel until it shows up.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // If the proto buffer has an associated file, resolve it
                    // against a local worktree before building the buffer.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks parked in the `Id` branch above waiting
                    // for this buffer to appear.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
3656
    /// Reconstructs a `Symbol` from its wire representation, recomputing its
    /// display label with the local language registry when possible.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes an integer received over the wire
        // into an enum without validating its range — an out-of-range
        // discriminant would be undefined behavior. A checked conversion
        // would be safer for untrusted input; confirm the valid range.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            // Prefer a language-specific label; fall back to the plain name.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
3685
3686 async fn handle_buffer_saved(
3687 this: ModelHandle<Self>,
3688 envelope: TypedEnvelope<proto::BufferSaved>,
3689 _: Arc<Client>,
3690 mut cx: AsyncAppContext,
3691 ) -> Result<()> {
3692 let version = deserialize_version(envelope.payload.version);
3693 let mtime = envelope
3694 .payload
3695 .mtime
3696 .ok_or_else(|| anyhow!("missing mtime"))?
3697 .into();
3698
3699 this.update(&mut cx, |this, cx| {
3700 let buffer = this
3701 .opened_buffers
3702 .get(&envelope.payload.buffer_id)
3703 .and_then(|buffer| buffer.upgrade(cx));
3704 if let Some(buffer) = buffer {
3705 buffer.update(cx, |buffer, cx| {
3706 buffer.did_save(version, mtime, None, cx);
3707 });
3708 }
3709 Ok(())
3710 })
3711 }
3712
3713 async fn handle_buffer_reloaded(
3714 this: ModelHandle<Self>,
3715 envelope: TypedEnvelope<proto::BufferReloaded>,
3716 _: Arc<Client>,
3717 mut cx: AsyncAppContext,
3718 ) -> Result<()> {
3719 let payload = envelope.payload.clone();
3720 let version = deserialize_version(payload.version);
3721 let mtime = payload
3722 .mtime
3723 .ok_or_else(|| anyhow!("missing mtime"))?
3724 .into();
3725 this.update(&mut cx, |this, cx| {
3726 let buffer = this
3727 .opened_buffers
3728 .get(&payload.buffer_id)
3729 .and_then(|buffer| buffer.upgrade(cx));
3730 if let Some(buffer) = buffer {
3731 buffer.update(cx, |buffer, cx| {
3732 buffer.did_reload(version, mtime, cx);
3733 });
3734 }
3735 Ok(())
3736 })
3737 }
3738
3739 pub fn match_paths<'a>(
3740 &self,
3741 query: &'a str,
3742 include_ignored: bool,
3743 smart_case: bool,
3744 max_results: usize,
3745 cancel_flag: &'a AtomicBool,
3746 cx: &AppContext,
3747 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3748 let worktrees = self
3749 .worktrees(cx)
3750 .filter(|worktree| worktree.read(cx).is_visible())
3751 .collect::<Vec<_>>();
3752 let include_root_name = worktrees.len() > 1;
3753 let candidate_sets = worktrees
3754 .into_iter()
3755 .map(|worktree| CandidateSet {
3756 snapshot: worktree.read(cx).snapshot(),
3757 include_ignored,
3758 include_root_name,
3759 })
3760 .collect::<Vec<_>>();
3761
3762 let background = cx.background().clone();
3763 async move {
3764 fuzzy::match_paths(
3765 candidate_sets.as_slice(),
3766 query,
3767 smart_case,
3768 max_results,
3769 cancel_flag,
3770 background,
3771 )
3772 .await
3773 }
3774 }
3775
    /// Converts a batch of LSP `TextEdit`s into anchored edits on `buffer`.
    ///
    /// The LSP edits are interpreted against the buffer snapshot matching
    /// `version` (or the current snapshot when `None`). Returns an error if
    /// no such snapshot is retained or if any edit falls outside the
    /// snapshot's bounds.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Merge across the gap only when it is exactly one
                        // newline: the next edit starts at column 0 of the
                        // line right after this edit's end, and nothing else
                        // follows on the end line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose endpoints don't lie on valid positions
                // in the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether the cursor has advanced past unchanged
                    // text since the last emitted edit, i.e. whether the next
                    // change starts a new edit or extends the previous one.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Contiguous with the previous edit:
                                    // extend its deleted range instead.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    // Contiguous with the previous edit:
                                    // append to its replacement text.
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
3878
3879 fn buffer_snapshot_for_lsp_version(
3880 &mut self,
3881 buffer: &ModelHandle<Buffer>,
3882 version: Option<i32>,
3883 cx: &AppContext,
3884 ) -> Result<TextBufferSnapshot> {
3885 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
3886
3887 if let Some(version) = version {
3888 let buffer_id = buffer.read(cx).remote_id();
3889 let snapshots = self
3890 .buffer_snapshots
3891 .get_mut(&buffer_id)
3892 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
3893 let mut found_snapshot = None;
3894 snapshots.retain(|(snapshot_version, snapshot)| {
3895 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
3896 false
3897 } else {
3898 if *snapshot_version == version {
3899 found_snapshot = Some(snapshot.clone());
3900 }
3901 true
3902 }
3903 });
3904
3905 found_snapshot.ok_or_else(|| {
3906 anyhow!(
3907 "snapshot not found for buffer {} at version {}",
3908 buffer_id,
3909 version
3910 )
3911 })
3912 } else {
3913 Ok((buffer.read(cx)).text_snapshot())
3914 }
3915 }
3916
3917 fn language_server_for_buffer(
3918 &self,
3919 buffer: &Buffer,
3920 cx: &AppContext,
3921 ) -> Option<&Arc<LanguageServer>> {
3922 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
3923 let worktree_id = file.worktree_id(cx);
3924 self.language_servers.get(&(worktree_id, language.name()))
3925 } else {
3926 None
3927 }
3928 }
3929}
3930
3931impl WorktreeHandle {
3932 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3933 match self {
3934 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3935 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3936 }
3937 }
3938}
3939
3940impl OpenBuffer {
3941 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3942 match self {
3943 OpenBuffer::Strong(handle) => Some(handle.clone()),
3944 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3945 OpenBuffer::Loading(_) => None,
3946 }
3947 }
3948}
3949
/// A single worktree's snapshot presented as a set of fuzzy-match
/// candidates for `fuzzy::match_paths`.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files are included in the candidate list.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree root name
    // (used to disambiguate when multiple worktrees are open).
    include_root_name: bool,
}
3955
3956impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3957 type Candidates = CandidateSetIter<'a>;
3958
3959 fn id(&self) -> usize {
3960 self.snapshot.id().to_usize()
3961 }
3962
3963 fn len(&self) -> usize {
3964 if self.include_ignored {
3965 self.snapshot.file_count()
3966 } else {
3967 self.snapshot.visible_file_count()
3968 }
3969 }
3970
3971 fn prefix(&self) -> Arc<str> {
3972 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3973 self.snapshot.root_name().into()
3974 } else if self.include_root_name {
3975 format!("{}/", self.snapshot.root_name()).into()
3976 } else {
3977 "".into()
3978 }
3979 }
3980
3981 fn candidates(&'a self, start: usize) -> Self::Candidates {
3982 CandidateSetIter {
3983 traversal: self.snapshot.files(self.include_ignored, start),
3984 }
3985 }
3986}
3987
/// Iterator adapter that turns a worktree file traversal into fuzzy-match
/// candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
3991
3992impl<'a> Iterator for CandidateSetIter<'a> {
3993 type Item = PathMatchCandidate<'a>;
3994
3995 fn next(&mut self) -> Option<Self::Item> {
3996 self.traversal.next().map(|entry| {
3997 if let EntryKind::File(char_bag) = entry.kind {
3998 PathMatchCandidate {
3999 path: &entry.path,
4000 char_bag,
4001 }
4002 } else {
4003 unreachable!()
4004 }
4005 })
4006 }
4007}
4008
impl Entity for Project {
    type Event = Event;

    // Called when the last handle to the project is dropped: tell the server
    // this project is no longer registered (when hosting) or that we've left
    // it (when a guest). Failures are logged and otherwise ignored.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually assigned a
                // remote id.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    // On app quit, shut down every language server and wait for all of the
    // shutdowns to complete before allowing the process to exit.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4048
4049impl Collaborator {
4050 fn from_proto(
4051 message: proto::Collaborator,
4052 user_store: &ModelHandle<UserStore>,
4053 cx: &mut AsyncAppContext,
4054 ) -> impl Future<Output = Result<Self>> {
4055 let user = user_store.update(cx, |user_store, cx| {
4056 user_store.fetch_user(message.user_id, cx)
4057 });
4058
4059 async move {
4060 Ok(Self {
4061 peer_id: PeerId(message.peer_id),
4062 user: user.await?,
4063 replica_id: message.replica_id as ReplicaId,
4064 })
4065 }
4066 }
4067}
4068
impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
    // Convenience conversion so call sites can write e.g.
    // `project.open_buffer((worktree_id, "some/path"), cx)`.
    fn from((worktree_id, path): (WorktreeId, P)) -> Self {
        Self {
            worktree_id,
            path: path.as_ref().into(),
        }
    }
}
4077
// Maps LSP file-creation options onto the local fs equivalents; flags the
// server leaves unspecified default to `false`.
impl From<lsp::CreateFileOptions> for fs::CreateOptions {
    fn from(options: lsp::CreateFileOptions) -> Self {
        Self {
            overwrite: options.overwrite.unwrap_or(false),
            ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
        }
    }
}
4086
// Maps LSP file-rename options onto the local fs equivalents; flags the
// server leaves unspecified default to `false`.
impl From<lsp::RenameFileOptions> for fs::RenameOptions {
    fn from(options: lsp::RenameFileOptions) -> Self {
        Self {
            overwrite: options.overwrite.unwrap_or(false),
            ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
        }
    }
}
4095
// Maps LSP file-deletion options onto the local fs equivalents; flags the
// server leaves unspecified default to `false`.
impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
    fn from(options: lsp::DeleteFileOptions) -> Self {
        Self {
            recursive: options.recursive.unwrap_or(false),
            ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
        }
    }
}
4104
/// Converts a `Symbol` into its wire representation (inverse of
/// `Project::deserialize_symbol`).
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // NOTE(review): transmuting the symbol kind to the proto integer
        // assumes the two types share a layout; a checked/explicit cast
        // would make that assumption visible — confirm and consider fixing.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4124
/// Computes the path of `path` relative to `base`, emitting `..` components
/// for every component of `base` that `path` does not share.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut origin = base.components();
    let mut rel: Vec<Component> = Vec::new();
    loop {
        match (target.next(), origin.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // Only the target has components left: they all belong to the
            // relative path.
            (Some(component), None) => {
                rel.push(component);
                rel.extend(target.by_ref());
                break;
            }
            // Only the base has components left: each becomes a `..`.
            (None, _) => rel.push(Component::ParentDir),
            // Matching components at the front of both paths form the shared
            // prefix and contribute nothing.
            (Some(a), Some(b)) if rel.is_empty() && a == b => {}
            // A `.` in the base matches nothing; keep the target component.
            (Some(component), Some(b)) if b == Component::CurDir => rel.push(component),
            // First divergence: back out of the remainder of the base, then
            // append the remainder of the target.
            (Some(component), Some(_)) => {
                rel.push(Component::ParentDir);
                rel.extend(origin.by_ref().map(|_| Component::ParentDir));
                rel.push(component);
                rel.extend(target.by_ref());
                break;
            }
        }
    }
    rel.iter().map(|component| component.as_os_str()).collect()
}
4153
4154#[cfg(test)]
4155mod tests {
4156 use super::{Event, *};
4157 use fs::RealFs;
4158 use futures::StreamExt;
4159 use gpui::test::subscribe;
4160 use language::{
4161 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4162 ToPoint,
4163 };
4164 use lsp::Url;
4165 use serde_json::json;
4166 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4167 use unindent::Unindent as _;
4168 use util::test::temp_tree;
4169 use worktree::WorktreeHandle as _;
4170
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        // Build a small on-disk tree, accessed through a symlink, with an
        // internal symlinked directory to exercise symlink handling.
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        // Wait for the initial scan, then verify that the symlinked
        // directory resolves to the same inodes as its target.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-match "bna" against the worktree and check the expected
        // paths come back (in order).
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4233
    // End-to-end check of language-server lifecycle management: a server is
    // started lazily when the first buffer of its language is opened, each
    // server receives didOpen/didChange/didClose only for buffers of its own
    // language (didSave goes to all servers), and a server's reported
    // capabilities configure every buffer of that language.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two fake servers with distinct completion trigger characters, so
        // assertions below can tell which server configured which buffer.
        let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
        let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
        rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });
        json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(rust_lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));
        let json_language = Arc::new(Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                language_server: Some(json_lsp_config),
                ..Default::default()
            },
            None,
        ));

        // One Rust file, one empty Rust file, plus a TOML file (no language
        // server) and a JSON file.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(rust_language);
            project.languages.add(json_language);
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        // The TOML buffer has no server, so it has no completion triggers.
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // Another language server is started up, and it is notified about
        // all three open buffers.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        // The Rust server's next didChange is for test2.rs, proving the TOML
        // edit was never sent to it.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
4453
    // Progress notifications carrying the disk-based-diagnostics progress
    // token should produce project events: one Started at the first
    // start_progress, DiagnosticsUpdated when diagnostics arrive, and
    // Updated/Finished once every nested start has been matched by an end —
    // overlapping start/end pairs collapse into a single cycle.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        // The token the fake server will use when reporting progress on its
        // disk-based diagnostics (e.g. a cargo-check-style run).
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Nest two more progress cycles; no additional Started event is
        // asserted for these — only the outermost cycle is reported.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        // Publish a diagnostic while progress is still in flight.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Closing out the remaining starts finishes the cycle.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        // Opening the diagnosed file shows the published diagnostic in its
        // snapshot.
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
4572
    // Diagnostics published for an *older* document version must be
    // translated through the edits made since that version, so they land on
    // the text they originally referred to. Also covers overlapping
    // diagnostics and diagnostics arriving out of positional order.
    #[gpui::test]
    async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        // Mark the "disk" source so its diagnostics are treated as
        // disk-based (is_disk_based below).
        lsp_config
            .disk_based_diagnostic_sources
            .insert("disk".to_string());
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() { A }
            fn b() { BB }
            fn c() { CCC }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        let open_notification = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await;

        // Edit the buffer, moving the content down
        buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
        let change_notification_1 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_1.text_document.version > open_notification.text_document.version
        );

        // Report some diagnostics for the initial version of the buffer
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        source: Some("disk".to_string()),
                        message: "undefined variable 'CCC'".to_string(),
                        ..Default::default()
                    },
                ],
            })
            .await;

        // The diagnostics have moved down since they were created.
        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Point::new(4, 9)..Point::new(4, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'CCC'".to_string(),
                            is_disk_based: true,
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        }
                    }
                ]
            );
            // Chunk iteration reflects the translated diagnostic ranges.
            assert_eq!(
                chunks_with_diagnostics(buffer, 0..buffer.len()),
                [
                    ("\n\nfn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn b() { ".to_string(), None),
                    ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\n".to_string(), None),
                ]
            );
            // A sub-range query clips chunks at its boundaries.
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
                [
                    ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
                ]
            );
        });

        // Ensure overlapping diagnostics are highlighted correctly.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "unreachable statement".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            })
            .await;

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
            // Where the error and warning overlap, the more severe (ERROR)
            // wins; the warning covers the remainder of its range.
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
                [
                    ("fn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
                [
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
        });

        // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
        // changes since the last save.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
            buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
        });
        let change_notification_2 =
            fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
        assert!(
            change_notification_2.await.text_document.version
                > change_notification_1.text_document.version
        );

        // Handle out-of-order diagnostics
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            })
            .await;

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            // Diagnostics come back sorted by position, each translated
            // through all edits made since the version they were reported for.
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(0..buffer.len())
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 21)..Point::new(2, 22),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
        });
    }
4865
    // Zero-width diagnostic ranges must still be visible: they are widened to
    // cover one adjacent character when rendered as chunks.
    #[gpui::test]
    async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Inject two empty-range diagnostics directly (no language server):
        // one mid-line (before ';') and one at the very end of a line.
        project.update(cx, |project, cx| {
            project
                .update_buffer_diagnostics(
                    &buffer,
                    vec![
                        DiagnosticEntry {
                            range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    None,
                    cx,
                )
                .unwrap();
        });

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        buffer.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let one = ", None),
                    (";", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet two =", None),
                    (" ", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet three = 3;\n", None)
                ]
            );
        });
    }
4944
    // LSP edits computed against an older document version must be mapped
    // through the buffer edits made since that version, so they apply to the
    // text the server actually saw. Also covers two adjacent LSP edits at the
    // same position being combined sensibly.
    #[gpui::test]
    async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() {
                f1();
            }
            fn b() {
                f2();
            }
            fn c() {
                f3();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Remember the version the server was told about at open time; the
        // LSP edits below will be expressed against this version.
        let mut fake_server = fake_servers.next().await.unwrap();
        let lsp_document_version = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .version;

        // Simulate editing the buffer after the language server computes some edits.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [Point::new(0, 0)..Point::new(0, 0)],
                "// above first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(2, 0)..Point::new(2, 0)],
                "    // inside first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(6, 4)..Point::new(6, 4)],
                "// inside second function ",
                cx,
            );

            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f1();
                    }
                    fn b() {
                        // inside second function f2();
                    }
                    fn c() {
                        f3();
                    }
                "
                .unindent()
            );
        });

        // Convert LSP edits (expressed against the open-time version) into
        // buffer edits against the current text.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    vec![
                        // replace body of first function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 0),
                                lsp::Position::new(3, 0),
                            ),
                            new_text: "
                                fn a() {
                                    f10();
                                }
                            "
                            .unindent(),
                        },
                        // edit inside second function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(4, 6),
                                lsp::Position::new(4, 6),
                            ),
                            new_text: "00".into(),
                        },
                        // edit inside third function via two distinct edits
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 5),
                            ),
                            new_text: "4000".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 6),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    Some(lsp_document_version),
                    cx,
                )
            })
            .await
            .unwrap();

        // Applying the translated edits preserves the user's interleaved
        // comments while applying the server's changes.
        buffer.update(cx, |buffer, cx| {
            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f10();
                    }
                    fn b() {
                        // inside second function f200();
                    }
                    fn c() {
                        f4000();
                    }
                "
                .unindent()
            );
        });
    }
5117
    // A server may express a small change as a huge delete-and-reinsert diff
    // (as rust-analyzer does for merge-imports). edits_from_lsp should diff
    // the old and new text and produce a minimal set of buffer edits.
    #[gpui::test]
    async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = "
            use a::b;
            use a::c;

            fn f() {
                b();
                c();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Simulate the language server sending us a small edit in the form of a very large diff.
        // Rust-analyzer does this when performing a merge-imports code action.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    [
                        // Replace the first use statement without editing the semicolon.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 8),
                            ),
                            new_text: "a::{b, c}".into(),
                        },
                        // Reinsert the remainder of the file between the semicolon and the final
                        // newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "\n\n".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "
                                fn f() {
                                    b();
                                    c();
                                }"
                            .unindent(),
                        },
                        // Delete everything after the first newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(1, 0),
                                lsp::Position::new(7, 0),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();

        buffer.update(cx, |buffer, cx| {
            let edits = edits
                .into_iter()
                .map(|(range, text)| {
                    (
                        range.start.to_point(&buffer)..range.end.to_point(&buffer),
                        text,
                    )
                })
                .collect::<Vec<_>>();

            // The massive delete-and-reinsert collapses to just two edits:
            // rewrite the import path, and remove the now-redundant second use.
            assert_eq!(
                edits,
                [
                    (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                    (Point::new(1, 0)..Point::new(2, 0), "".into())
                ]
            );

            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    use a::{b, c};

                    fn f() {
                        b();
                        c();
                    }
                "
                .unindent()
            );
        });
    }
5247
5248 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5249 buffer: &Buffer,
5250 range: Range<T>,
5251 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5252 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5253 for chunk in buffer.snapshot().chunks(range, true) {
5254 if chunks
5255 .last()
5256 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5257 {
5258 chunks.last_mut().unwrap().0.push_str(chunk.text);
5259 } else {
5260 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5261 }
5262 }
5263 chunks
5264 }
5265
5266 #[gpui::test]
5267 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5268 let dir = temp_tree(json!({
5269 "root": {
5270 "dir1": {},
5271 "dir2": {
5272 "dir3": {}
5273 }
5274 }
5275 }));
5276
5277 let project = Project::test(Arc::new(RealFs), cx);
5278 let (tree, _) = project
5279 .update(cx, |project, cx| {
5280 project.find_or_create_local_worktree(&dir.path(), true, cx)
5281 })
5282 .await
5283 .unwrap();
5284
5285 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5286 .await;
5287
5288 let cancel_flag = Default::default();
5289 let results = project
5290 .read_with(cx, |project, cx| {
5291 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5292 })
5293 .await;
5294
5295 assert!(results.is_empty());
5296 }
5297
5298 #[gpui::test]
5299 async fn test_definition(cx: &mut gpui::TestAppContext) {
5300 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5301 let language = Arc::new(Language::new(
5302 LanguageConfig {
5303 name: "Rust".into(),
5304 path_suffixes: vec!["rs".to_string()],
5305 language_server: Some(language_server_config),
5306 ..Default::default()
5307 },
5308 Some(tree_sitter_rust::language()),
5309 ));
5310
5311 let fs = FakeFs::new(cx.background());
5312 fs.insert_tree(
5313 "/dir",
5314 json!({
5315 "a.rs": "const fn a() { A }",
5316 "b.rs": "const y: i32 = crate::a()",
5317 }),
5318 )
5319 .await;
5320
5321 let project = Project::test(fs, cx);
5322 project.update(cx, |project, _| {
5323 Arc::get_mut(&mut project.languages).unwrap().add(language);
5324 });
5325
5326 let (tree, _) = project
5327 .update(cx, |project, cx| {
5328 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5329 })
5330 .await
5331 .unwrap();
5332 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5333 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5334 .await;
5335
5336 let buffer = project
5337 .update(cx, |project, cx| {
5338 project.open_buffer(
5339 ProjectPath {
5340 worktree_id,
5341 path: Path::new("").into(),
5342 },
5343 cx,
5344 )
5345 })
5346 .await
5347 .unwrap();
5348
5349 let mut fake_server = fake_servers.next().await.unwrap();
5350 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5351 let params = params.text_document_position_params;
5352 assert_eq!(
5353 params.text_document.uri.to_file_path().unwrap(),
5354 Path::new("/dir/b.rs"),
5355 );
5356 assert_eq!(params.position, lsp::Position::new(0, 22));
5357
5358 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5359 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5360 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5361 )))
5362 });
5363
5364 let mut definitions = project
5365 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5366 .await
5367 .unwrap();
5368
5369 assert_eq!(definitions.len(), 1);
5370 let definition = definitions.pop().unwrap();
5371 cx.update(|cx| {
5372 let target_buffer = definition.buffer.read(cx);
5373 assert_eq!(
5374 target_buffer
5375 .file()
5376 .unwrap()
5377 .as_local()
5378 .unwrap()
5379 .abs_path(cx),
5380 Path::new("/dir/a.rs"),
5381 );
5382 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5383 assert_eq!(
5384 list_worktrees(&project, cx),
5385 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5386 );
5387
5388 drop(definition);
5389 });
5390 cx.read(|cx| {
5391 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5392 });
5393
5394 fn list_worktrees<'a>(
5395 project: &'a ModelHandle<Project>,
5396 cx: &'a AppContext,
5397 ) -> Vec<(&'a Path, bool)> {
5398 project
5399 .read(cx)
5400 .worktrees(cx)
5401 .map(|worktree| {
5402 let worktree = worktree.read(cx);
5403 (
5404 worktree.as_local().unwrap().abs_path().as_ref(),
5405 worktree.is_visible(),
5406 )
5407 })
5408 .collect::<Vec<_>>()
5409 }
5410 }
5411
5412 #[gpui::test]
5413 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5414 let fs = FakeFs::new(cx.background());
5415 fs.insert_tree(
5416 "/dir",
5417 json!({
5418 "file1": "the old contents",
5419 }),
5420 )
5421 .await;
5422
5423 let project = Project::test(fs.clone(), cx);
5424 let worktree_id = project
5425 .update(cx, |p, cx| {
5426 p.find_or_create_local_worktree("/dir", true, cx)
5427 })
5428 .await
5429 .unwrap()
5430 .0
5431 .read_with(cx, |tree, _| tree.id());
5432
5433 let buffer = project
5434 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5435 .await
5436 .unwrap();
5437 buffer
5438 .update(cx, |buffer, cx| {
5439 assert_eq!(buffer.text(), "the old contents");
5440 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5441 buffer.save(cx)
5442 })
5443 .await
5444 .unwrap();
5445
5446 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5447 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5448 }
5449
5450 #[gpui::test]
5451 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5452 let fs = FakeFs::new(cx.background());
5453 fs.insert_tree(
5454 "/dir",
5455 json!({
5456 "file1": "the old contents",
5457 }),
5458 )
5459 .await;
5460
5461 let project = Project::test(fs.clone(), cx);
5462 let worktree_id = project
5463 .update(cx, |p, cx| {
5464 p.find_or_create_local_worktree("/dir/file1", true, cx)
5465 })
5466 .await
5467 .unwrap()
5468 .0
5469 .read_with(cx, |tree, _| tree.id());
5470
5471 let buffer = project
5472 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5473 .await
5474 .unwrap();
5475 buffer
5476 .update(cx, |buffer, cx| {
5477 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5478 buffer.save(cx)
5479 })
5480 .await
5481 .unwrap();
5482
5483 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5484 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5485 }
5486
5487 #[gpui::test]
5488 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5489 let fs = FakeFs::new(cx.background());
5490 fs.insert_tree("/dir", json!({})).await;
5491
5492 let project = Project::test(fs.clone(), cx);
5493 let (worktree, _) = project
5494 .update(cx, |project, cx| {
5495 project.find_or_create_local_worktree("/dir", true, cx)
5496 })
5497 .await
5498 .unwrap();
5499 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5500
5501 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5502 buffer.update(cx, |buffer, cx| {
5503 buffer.edit([0..0], "abc", cx);
5504 assert!(buffer.is_dirty());
5505 assert!(!buffer.has_conflict());
5506 });
5507 project
5508 .update(cx, |project, cx| {
5509 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5510 })
5511 .await
5512 .unwrap();
5513 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5514 buffer.read_with(cx, |buffer, cx| {
5515 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5516 assert!(!buffer.is_dirty());
5517 assert!(!buffer.has_conflict());
5518 });
5519
5520 let opened_buffer = project
5521 .update(cx, |project, cx| {
5522 project.open_buffer((worktree_id, "file1"), cx)
5523 })
5524 .await
5525 .unwrap();
5526 assert_eq!(opened_buffer, buffer);
5527 }
5528
5529 #[gpui::test(retries = 5)]
5530 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5531 let dir = temp_tree(json!({
5532 "a": {
5533 "file1": "",
5534 "file2": "",
5535 "file3": "",
5536 },
5537 "b": {
5538 "c": {
5539 "file4": "",
5540 "file5": "",
5541 }
5542 }
5543 }));
5544
5545 let project = Project::test(Arc::new(RealFs), cx);
5546 let rpc = project.read_with(cx, |p, _| p.client.clone());
5547
5548 let (tree, _) = project
5549 .update(cx, |p, cx| {
5550 p.find_or_create_local_worktree(dir.path(), true, cx)
5551 })
5552 .await
5553 .unwrap();
5554 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5555
5556 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5557 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5558 async move { buffer.await.unwrap() }
5559 };
5560 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5561 tree.read_with(cx, |tree, _| {
5562 tree.entry_for_path(path)
5563 .expect(&format!("no entry for path {}", path))
5564 .id
5565 })
5566 };
5567
5568 let buffer2 = buffer_for_path("a/file2", cx).await;
5569 let buffer3 = buffer_for_path("a/file3", cx).await;
5570 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5571 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5572
5573 let file2_id = id_for_path("a/file2", &cx);
5574 let file3_id = id_for_path("a/file3", &cx);
5575 let file4_id = id_for_path("b/c/file4", &cx);
5576
5577 // Wait for the initial scan.
5578 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5579 .await;
5580
5581 // Create a remote copy of this worktree.
5582 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5583 let (remote, load_task) = cx.update(|cx| {
5584 Worktree::remote(
5585 1,
5586 1,
5587 initial_snapshot.to_proto(&Default::default(), true),
5588 rpc.clone(),
5589 cx,
5590 )
5591 });
5592 load_task.await;
5593
5594 cx.read(|cx| {
5595 assert!(!buffer2.read(cx).is_dirty());
5596 assert!(!buffer3.read(cx).is_dirty());
5597 assert!(!buffer4.read(cx).is_dirty());
5598 assert!(!buffer5.read(cx).is_dirty());
5599 });
5600
5601 // Rename and delete files and directories.
5602 tree.flush_fs_events(&cx).await;
5603 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5604 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5605 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5606 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5607 tree.flush_fs_events(&cx).await;
5608
5609 let expected_paths = vec![
5610 "a",
5611 "a/file1",
5612 "a/file2.new",
5613 "b",
5614 "d",
5615 "d/file3",
5616 "d/file4",
5617 ];
5618
5619 cx.read(|app| {
5620 assert_eq!(
5621 tree.read(app)
5622 .paths()
5623 .map(|p| p.to_str().unwrap())
5624 .collect::<Vec<_>>(),
5625 expected_paths
5626 );
5627
5628 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5629 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5630 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5631
5632 assert_eq!(
5633 buffer2.read(app).file().unwrap().path().as_ref(),
5634 Path::new("a/file2.new")
5635 );
5636 assert_eq!(
5637 buffer3.read(app).file().unwrap().path().as_ref(),
5638 Path::new("d/file3")
5639 );
5640 assert_eq!(
5641 buffer4.read(app).file().unwrap().path().as_ref(),
5642 Path::new("d/file4")
5643 );
5644 assert_eq!(
5645 buffer5.read(app).file().unwrap().path().as_ref(),
5646 Path::new("b/c/file5")
5647 );
5648
5649 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5650 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5651 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5652 assert!(buffer5.read(app).file().unwrap().is_deleted());
5653 });
5654
5655 // Update the remote worktree. Check that it becomes consistent with the
5656 // local worktree.
5657 remote.update(cx, |remote, cx| {
5658 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5659 &initial_snapshot,
5660 1,
5661 1,
5662 true,
5663 );
5664 remote
5665 .as_remote_mut()
5666 .unwrap()
5667 .snapshot
5668 .apply_remote_update(update_message)
5669 .unwrap();
5670
5671 assert_eq!(
5672 remote
5673 .paths()
5674 .map(|p| p.to_str().unwrap())
5675 .collect::<Vec<_>>(),
5676 expected_paths
5677 );
5678 });
5679 }
5680
5681 #[gpui::test]
5682 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5683 let fs = FakeFs::new(cx.background());
5684 fs.insert_tree(
5685 "/the-dir",
5686 json!({
5687 "a.txt": "a-contents",
5688 "b.txt": "b-contents",
5689 }),
5690 )
5691 .await;
5692
5693 let project = Project::test(fs.clone(), cx);
5694 let worktree_id = project
5695 .update(cx, |p, cx| {
5696 p.find_or_create_local_worktree("/the-dir", true, cx)
5697 })
5698 .await
5699 .unwrap()
5700 .0
5701 .read_with(cx, |tree, _| tree.id());
5702
5703 // Spawn multiple tasks to open paths, repeating some paths.
5704 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5705 (
5706 p.open_buffer((worktree_id, "a.txt"), cx),
5707 p.open_buffer((worktree_id, "b.txt"), cx),
5708 p.open_buffer((worktree_id, "a.txt"), cx),
5709 )
5710 });
5711
5712 let buffer_a_1 = buffer_a_1.await.unwrap();
5713 let buffer_a_2 = buffer_a_2.await.unwrap();
5714 let buffer_b = buffer_b.await.unwrap();
5715 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
5716 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
5717
5718 // There is only one buffer per path.
5719 let buffer_a_id = buffer_a_1.id();
5720 assert_eq!(buffer_a_2.id(), buffer_a_id);
5721
5722 // Open the same path again while it is still open.
5723 drop(buffer_a_1);
5724 let buffer_a_3 = project
5725 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
5726 .await
5727 .unwrap();
5728
5729 // There's still only one buffer per path.
5730 assert_eq!(buffer_a_3.id(), buffer_a_id);
5731 }
5732
5733 #[gpui::test]
5734 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5735 use std::fs;
5736
5737 let dir = temp_tree(json!({
5738 "file1": "abc",
5739 "file2": "def",
5740 "file3": "ghi",
5741 }));
5742
5743 let project = Project::test(Arc::new(RealFs), cx);
5744 let (worktree, _) = project
5745 .update(cx, |p, cx| {
5746 p.find_or_create_local_worktree(dir.path(), true, cx)
5747 })
5748 .await
5749 .unwrap();
5750 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5751
5752 worktree.flush_fs_events(&cx).await;
5753 worktree
5754 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
5755 .await;
5756
5757 let buffer1 = project
5758 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5759 .await
5760 .unwrap();
5761 let events = Rc::new(RefCell::new(Vec::new()));
5762
5763 // initially, the buffer isn't dirty.
5764 buffer1.update(cx, |buffer, cx| {
5765 cx.subscribe(&buffer1, {
5766 let events = events.clone();
5767 move |_, _, event, _| match event {
5768 BufferEvent::Operation(_) => {}
5769 _ => events.borrow_mut().push(event.clone()),
5770 }
5771 })
5772 .detach();
5773
5774 assert!(!buffer.is_dirty());
5775 assert!(events.borrow().is_empty());
5776
5777 buffer.edit(vec![1..2], "", cx);
5778 });
5779
5780 // after the first edit, the buffer is dirty, and emits a dirtied event.
5781 buffer1.update(cx, |buffer, cx| {
5782 assert!(buffer.text() == "ac");
5783 assert!(buffer.is_dirty());
5784 assert_eq!(
5785 *events.borrow(),
5786 &[language::Event::Edited, language::Event::Dirtied]
5787 );
5788 events.borrow_mut().clear();
5789 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
5790 });
5791
5792 // after saving, the buffer is not dirty, and emits a saved event.
5793 buffer1.update(cx, |buffer, cx| {
5794 assert!(!buffer.is_dirty());
5795 assert_eq!(*events.borrow(), &[language::Event::Saved]);
5796 events.borrow_mut().clear();
5797
5798 buffer.edit(vec![1..1], "B", cx);
5799 buffer.edit(vec![2..2], "D", cx);
5800 });
5801
5802 // after editing again, the buffer is dirty, and emits another dirty event.
5803 buffer1.update(cx, |buffer, cx| {
5804 assert!(buffer.text() == "aBDc");
5805 assert!(buffer.is_dirty());
5806 assert_eq!(
5807 *events.borrow(),
5808 &[
5809 language::Event::Edited,
5810 language::Event::Dirtied,
5811 language::Event::Edited,
5812 ],
5813 );
5814 events.borrow_mut().clear();
5815
5816 // TODO - currently, after restoring the buffer to its
5817 // previously-saved state, the is still considered dirty.
5818 buffer.edit([1..3], "", cx);
5819 assert!(buffer.text() == "ac");
5820 assert!(buffer.is_dirty());
5821 });
5822
5823 assert_eq!(*events.borrow(), &[language::Event::Edited]);
5824
5825 // When a file is deleted, the buffer is considered dirty.
5826 let events = Rc::new(RefCell::new(Vec::new()));
5827 let buffer2 = project
5828 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
5829 .await
5830 .unwrap();
5831 buffer2.update(cx, |_, cx| {
5832 cx.subscribe(&buffer2, {
5833 let events = events.clone();
5834 move |_, _, event, _| events.borrow_mut().push(event.clone())
5835 })
5836 .detach();
5837 });
5838
5839 fs::remove_file(dir.path().join("file2")).unwrap();
5840 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
5841 assert_eq!(
5842 *events.borrow(),
5843 &[language::Event::Dirtied, language::Event::FileHandleChanged]
5844 );
5845
5846 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5847 let events = Rc::new(RefCell::new(Vec::new()));
5848 let buffer3 = project
5849 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
5850 .await
5851 .unwrap();
5852 buffer3.update(cx, |_, cx| {
5853 cx.subscribe(&buffer3, {
5854 let events = events.clone();
5855 move |_, _, event, _| events.borrow_mut().push(event.clone())
5856 })
5857 .detach();
5858 });
5859
5860 worktree.flush_fs_events(&cx).await;
5861 buffer3.update(cx, |buffer, cx| {
5862 buffer.edit(Some(0..0), "x", cx);
5863 });
5864 events.borrow_mut().clear();
5865 fs::remove_file(dir.path().join("file3")).unwrap();
5866 buffer3
5867 .condition(&cx, |_, _| !events.borrow().is_empty())
5868 .await;
5869 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
5870 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
5871 }
5872
    // How an open buffer reacts when its backing file changes on disk:
    // a clean buffer silently reloads; a dirty buffer keeps its edits and is
    // flagged as conflicted instead.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        // Wait for the worktree's initial scan before touching the file.
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        // The reload leaves the buffer clean: not dirty, not conflicted.
        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
5970
    // Diagnostics published with related-information entries are grouped:
    // each primary diagnostic and its hints share a group_id, with exactly one
    // entry per group marked is_primary.
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Build an LSP publishDiagnostics message containing two "groups":
        // "error 1" (warning) with one hint, and "error 2" (error) with two
        // hints. Hints cross-reference their primaries via related_information.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        // Ingest the message and snapshot the buffer's resulting diagnostics.
        project
            .update(cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All five entries are present, ordered by position, with group 0 for
        // "error 1" and group 1 for "error 2"; primaries are flagged.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Each group can also be queried individually by its id.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
6231
    // Exercises LSP rename end-to-end against a fake language server:
    // prepare_rename resolves the symbol's range, then perform_rename applies a
    // multi-file WorkspaceEdit and returns the affected buffers.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a .rs buffer starts the fake language server.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // prepare_rename at offset 7 (inside "ONE") should yield range 6..9.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // perform_rename returns a WorkspaceEdit touching both files: one edit
        // in one.rs and two in two.rs.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The returned transaction covers both edited buffers; verify the
        // edits were applied to each.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
6375
6376 #[gpui::test]
6377 async fn test_search(cx: &mut gpui::TestAppContext) {
6378 let fs = FakeFs::new(cx.background());
6379 fs.insert_tree(
6380 "/dir",
6381 json!({
6382 "one.rs": "const ONE: usize = 1;",
6383 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6384 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6385 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6386 }),
6387 )
6388 .await;
6389 let project = Project::test(fs.clone(), cx);
6390 let (tree, _) = project
6391 .update(cx, |project, cx| {
6392 project.find_or_create_local_worktree("/dir", true, cx)
6393 })
6394 .await
6395 .unwrap();
6396 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6397 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6398 .await;
6399
6400 assert_eq!(
6401 search(&project, SearchQuery::text("TWO", false, true), cx)
6402 .await
6403 .unwrap(),
6404 HashMap::from_iter([
6405 ("two.rs".to_string(), vec![6..9]),
6406 ("three.rs".to_string(), vec![37..40])
6407 ])
6408 );
6409
6410 let buffer_4 = project
6411 .update(cx, |project, cx| {
6412 project.open_buffer((worktree_id, "four.rs"), cx)
6413 })
6414 .await
6415 .unwrap();
6416 buffer_4.update(cx, |buffer, cx| {
6417 buffer.edit([20..28, 31..43], "two::TWO", cx);
6418 });
6419
6420 assert_eq!(
6421 search(&project, SearchQuery::text("TWO", false, true), cx)
6422 .await
6423 .unwrap(),
6424 HashMap::from_iter([
6425 ("two.rs".to_string(), vec![6..9]),
6426 ("three.rs".to_string(), vec![37..40]),
6427 ("four.rs".to_string(), vec![25..28, 36..39])
6428 ])
6429 );
6430
6431 async fn search(
6432 project: &ModelHandle<Project>,
6433 query: SearchQuery,
6434 cx: &mut gpui::TestAppContext,
6435 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6436 let results = project
6437 .update(cx, |project, cx| project.search(query, cx))
6438 .await?;
6439
6440 Ok(results
6441 .into_iter()
6442 .map(|(buffer, ranges)| {
6443 buffer.read_with(cx, |buffer, _| {
6444 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6445 let ranges = ranges
6446 .into_iter()
6447 .map(|range| range.to_offset(buffer))
6448 .collect::<Vec<_>>();
6449 (path, ranges)
6450 })
6451 })
6452 .collect())
6453 }
6454 }
6455}