1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
52pub struct Project {
53 worktrees: Vec<WorktreeHandle>,
54 active_entry: Option<ProjectEntryId>,
55 languages: Arc<LanguageRegistry>,
56 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
57 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
58 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
59 language_server_settings: Arc<Mutex<serde_json::Value>>,
60 next_language_server_id: usize,
61 client: Arc<client::Client>,
62 next_entry_id: Arc<AtomicUsize>,
63 user_store: ModelHandle<UserStore>,
64 fs: Arc<dyn Fs>,
65 client_state: ProjectClientState,
66 collaborators: HashMap<PeerId, Collaborator>,
67 subscriptions: Vec<client::Subscription>,
68 language_servers_with_diagnostics_running: isize,
69 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
70 shared_buffers: HashMap<PeerId, HashSet<u64>>,
71 loading_buffers: HashMap<
72 ProjectPath,
73 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
74 >,
75 loading_local_worktrees:
76 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
77 opened_buffers: HashMap<u64, OpenBuffer>,
78 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
79 nonce: u128,
80}
81
82enum OpenBuffer {
83 Strong(ModelHandle<Buffer>),
84 Weak(WeakModelHandle<Buffer>),
85 Loading(Vec<Operation>),
86}
87
88enum WorktreeHandle {
89 Strong(ModelHandle<Worktree>),
90 Weak(WeakModelHandle<Worktree>),
91}
92
93enum ProjectClientState {
94 Local {
95 is_shared: bool,
96 remote_id_tx: watch::Sender<Option<u64>>,
97 remote_id_rx: watch::Receiver<Option<u64>>,
98 _maintain_remote_id_task: Task<Option<()>>,
99 },
100 Remote {
101 sharing_has_stopped: bool,
102 remote_id: u64,
103 replica_id: ReplicaId,
104 _detect_unshare_task: Task<Option<()>>,
105 },
106}
107
108#[derive(Clone, Debug)]
109pub struct Collaborator {
110 pub user: Arc<User>,
111 pub peer_id: PeerId,
112 pub replica_id: ReplicaId,
113}
114
115#[derive(Clone, Debug, PartialEq)]
116pub enum Event {
117 ActiveEntryChanged(Option<ProjectEntryId>),
118 WorktreeRemoved(WorktreeId),
119 DiskBasedDiagnosticsStarted,
120 DiskBasedDiagnosticsUpdated,
121 DiskBasedDiagnosticsFinished,
122 DiagnosticsUpdated(ProjectPath),
123}
124
125enum LanguageServerEvent {
126 WorkStart {
127 token: String,
128 },
129 WorkProgress {
130 token: String,
131 progress: LanguageServerProgress,
132 },
133 WorkEnd {
134 token: String,
135 },
136 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
137}
138
139pub struct LanguageServerStatus {
140 pub name: String,
141 pub pending_work: BTreeMap<String, LanguageServerProgress>,
142 pending_diagnostic_updates: isize,
143}
144
145#[derive(Clone, Debug)]
146pub struct LanguageServerProgress {
147 pub message: Option<String>,
148 pub percentage: Option<usize>,
149 pub last_update_at: Instant,
150}
151
152#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
153pub struct ProjectPath {
154 pub worktree_id: WorktreeId,
155 pub path: Arc<Path>,
156}
157
158#[derive(Clone, Debug, Default, PartialEq)]
159pub struct DiagnosticSummary {
160 pub error_count: usize,
161 pub warning_count: usize,
162 pub info_count: usize,
163 pub hint_count: usize,
164}
165
166#[derive(Debug)]
167pub struct Location {
168 pub buffer: ModelHandle<Buffer>,
169 pub range: Range<language::Anchor>,
170}
171
172#[derive(Debug)]
173pub struct DocumentHighlight {
174 pub range: Range<language::Anchor>,
175 pub kind: DocumentHighlightKind,
176}
177
178#[derive(Clone, Debug)]
179pub struct Symbol {
180 pub source_worktree_id: WorktreeId,
181 pub worktree_id: WorktreeId,
182 pub language_name: String,
183 pub path: PathBuf,
184 pub label: CodeLabel,
185 pub name: String,
186 pub kind: lsp::SymbolKind,
187 pub range: Range<PointUtf16>,
188 pub signature: [u8; 32],
189}
190
191#[derive(Default)]
192pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
193
194impl DiagnosticSummary {
195 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
196 let mut this = Self {
197 error_count: 0,
198 warning_count: 0,
199 info_count: 0,
200 hint_count: 0,
201 };
202
203 for entry in diagnostics {
204 if entry.diagnostic.is_primary {
205 match entry.diagnostic.severity {
206 DiagnosticSeverity::ERROR => this.error_count += 1,
207 DiagnosticSeverity::WARNING => this.warning_count += 1,
208 DiagnosticSeverity::INFORMATION => this.info_count += 1,
209 DiagnosticSeverity::HINT => this.hint_count += 1,
210 _ => {}
211 }
212 }
213 }
214
215 this
216 }
217
218 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
219 proto::DiagnosticSummary {
220 path: path.to_string_lossy().to_string(),
221 error_count: self.error_count as u32,
222 warning_count: self.warning_count as u32,
223 info_count: self.info_count as u32,
224 hint_count: self.hint_count as u32,
225 }
226 }
227}
228
229#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
230pub struct ProjectEntryId(usize);
231
232impl ProjectEntryId {
233 pub fn new(counter: &AtomicUsize) -> Self {
234 Self(counter.fetch_add(1, SeqCst))
235 }
236
237 pub fn from_proto(id: u64) -> Self {
238 Self(id as usize)
239 }
240
241 pub fn to_proto(&self) -> u64 {
242 self.0 as u64
243 }
244
245 pub fn to_usize(&self) -> usize {
246 self.0
247 }
248}
249
250impl Project {
251 pub fn init(client: &Arc<Client>) {
252 client.add_entity_message_handler(Self::handle_add_collaborator);
253 client.add_entity_message_handler(Self::handle_buffer_reloaded);
254 client.add_entity_message_handler(Self::handle_buffer_saved);
255 client.add_entity_message_handler(Self::handle_start_language_server);
256 client.add_entity_message_handler(Self::handle_update_language_server);
257 client.add_entity_message_handler(Self::handle_remove_collaborator);
258 client.add_entity_message_handler(Self::handle_register_worktree);
259 client.add_entity_message_handler(Self::handle_unregister_worktree);
260 client.add_entity_message_handler(Self::handle_unshare_project);
261 client.add_entity_message_handler(Self::handle_update_buffer_file);
262 client.add_entity_message_handler(Self::handle_update_buffer);
263 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
264 client.add_entity_message_handler(Self::handle_update_worktree);
265 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
266 client.add_entity_request_handler(Self::handle_apply_code_action);
267 client.add_entity_request_handler(Self::handle_format_buffers);
268 client.add_entity_request_handler(Self::handle_get_code_actions);
269 client.add_entity_request_handler(Self::handle_get_completions);
270 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
271 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
272 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
273 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
274 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
275 client.add_entity_request_handler(Self::handle_search_project);
276 client.add_entity_request_handler(Self::handle_get_project_symbols);
277 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
278 client.add_entity_request_handler(Self::handle_open_buffer);
279 client.add_entity_request_handler(Self::handle_save_buffer);
280 }
281
282 pub fn local(
283 client: Arc<Client>,
284 user_store: ModelHandle<UserStore>,
285 languages: Arc<LanguageRegistry>,
286 fs: Arc<dyn Fs>,
287 cx: &mut MutableAppContext,
288 ) -> ModelHandle<Self> {
289 cx.add_model(|cx: &mut ModelContext<Self>| {
290 let (remote_id_tx, remote_id_rx) = watch::channel();
291 let _maintain_remote_id_task = cx.spawn_weak({
292 let rpc = client.clone();
293 move |this, mut cx| {
294 async move {
295 let mut status = rpc.status();
296 while let Some(status) = status.next().await {
297 if let Some(this) = this.upgrade(&cx) {
298 let remote_id = if status.is_connected() {
299 let response = rpc.request(proto::RegisterProject {}).await?;
300 Some(response.project_id)
301 } else {
302 None
303 };
304
305 if let Some(project_id) = remote_id {
306 let mut registrations = Vec::new();
307 this.update(&mut cx, |this, cx| {
308 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
309 registrations.push(worktree.update(
310 cx,
311 |worktree, cx| {
312 let worktree = worktree.as_local_mut().unwrap();
313 worktree.register(project_id, cx)
314 },
315 ));
316 }
317 });
318 for registration in registrations {
319 registration.await?;
320 }
321 }
322 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
323 }
324 }
325 Ok(())
326 }
327 .log_err()
328 }
329 });
330
331 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
332 Self {
333 worktrees: Default::default(),
334 collaborators: Default::default(),
335 opened_buffers: Default::default(),
336 shared_buffers: Default::default(),
337 loading_buffers: Default::default(),
338 loading_local_worktrees: Default::default(),
339 buffer_snapshots: Default::default(),
340 client_state: ProjectClientState::Local {
341 is_shared: false,
342 remote_id_tx,
343 remote_id_rx,
344 _maintain_remote_id_task,
345 },
346 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
347 subscriptions: Vec::new(),
348 active_entry: None,
349 languages,
350 client,
351 user_store,
352 fs,
353 next_entry_id: Default::default(),
354 language_servers_with_diagnostics_running: 0,
355 language_servers: Default::default(),
356 started_language_servers: Default::default(),
357 language_server_statuses: Default::default(),
358 language_server_settings: Default::default(),
359 next_language_server_id: 0,
360 nonce: StdRng::from_entropy().gen(),
361 }
362 })
363 }
364
365 pub async fn remote(
366 remote_id: u64,
367 client: Arc<Client>,
368 user_store: ModelHandle<UserStore>,
369 languages: Arc<LanguageRegistry>,
370 fs: Arc<dyn Fs>,
371 cx: &mut AsyncAppContext,
372 ) -> Result<ModelHandle<Self>> {
373 client.authenticate_and_connect(&cx).await?;
374
375 let response = client
376 .request(proto::JoinProject {
377 project_id: remote_id,
378 })
379 .await?;
380
381 let replica_id = response.replica_id as ReplicaId;
382
383 let mut worktrees = Vec::new();
384 for worktree in response.worktrees {
385 let (worktree, load_task) = cx
386 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
387 worktrees.push(worktree);
388 load_task.detach();
389 }
390
391 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
392 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
393 let mut this = Self {
394 worktrees: Vec::new(),
395 loading_buffers: Default::default(),
396 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
397 shared_buffers: Default::default(),
398 loading_local_worktrees: Default::default(),
399 active_entry: None,
400 collaborators: Default::default(),
401 languages,
402 user_store: user_store.clone(),
403 fs,
404 next_entry_id: Default::default(),
405 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
406 client: client.clone(),
407 client_state: ProjectClientState::Remote {
408 sharing_has_stopped: false,
409 remote_id,
410 replica_id,
411 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
412 async move {
413 let mut status = client.status();
414 let is_connected =
415 status.next().await.map_or(false, |s| s.is_connected());
416 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
417 if !is_connected || status.next().await.is_some() {
418 if let Some(this) = this.upgrade(&cx) {
419 this.update(&mut cx, |this, cx| this.project_unshared(cx))
420 }
421 }
422 Ok(())
423 }
424 .log_err()
425 }),
426 },
427 language_servers_with_diagnostics_running: 0,
428 language_servers: Default::default(),
429 started_language_servers: Default::default(),
430 language_server_settings: Default::default(),
431 language_server_statuses: response
432 .language_servers
433 .into_iter()
434 .map(|server| {
435 (
436 server.id as usize,
437 LanguageServerStatus {
438 name: server.name,
439 pending_work: Default::default(),
440 pending_diagnostic_updates: 0,
441 },
442 )
443 })
444 .collect(),
445 next_language_server_id: 0,
446 opened_buffers: Default::default(),
447 buffer_snapshots: Default::default(),
448 nonce: StdRng::from_entropy().gen(),
449 };
450 for worktree in worktrees {
451 this.add_worktree(&worktree, cx);
452 }
453 this
454 });
455
456 let user_ids = response
457 .collaborators
458 .iter()
459 .map(|peer| peer.user_id)
460 .collect();
461 user_store
462 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
463 .await?;
464 let mut collaborators = HashMap::default();
465 for message in response.collaborators {
466 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
467 collaborators.insert(collaborator.peer_id, collaborator);
468 }
469
470 this.update(cx, |this, _| {
471 this.collaborators = collaborators;
472 });
473
474 Ok(this)
475 }
476
477 #[cfg(any(test, feature = "test-support"))]
478 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
479 let languages = Arc::new(LanguageRegistry::test());
480 let http_client = client::test::FakeHttpClient::with_404_response();
481 let client = client::Client::new(http_client.clone());
482 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
483 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
484 }
485
486 #[cfg(any(test, feature = "test-support"))]
487 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
488 self.opened_buffers
489 .get(&remote_id)
490 .and_then(|buffer| buffer.upgrade(cx))
491 }
492
493 #[cfg(any(test, feature = "test-support"))]
494 pub fn languages(&self) -> &Arc<LanguageRegistry> {
495 &self.languages
496 }
497
498 #[cfg(any(test, feature = "test-support"))]
499 pub fn check_invariants(&self, cx: &AppContext) {
500 if self.is_local() {
501 let mut worktree_root_paths = HashMap::default();
502 for worktree in self.worktrees(cx) {
503 let worktree = worktree.read(cx);
504 let abs_path = worktree.as_local().unwrap().abs_path().clone();
505 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
506 assert_eq!(
507 prev_worktree_id,
508 None,
509 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
510 abs_path,
511 worktree.id(),
512 prev_worktree_id
513 )
514 }
515 } else {
516 let replica_id = self.replica_id();
517 for buffer in self.opened_buffers.values() {
518 if let Some(buffer) = buffer.upgrade(cx) {
519 let buffer = buffer.read(cx);
520 assert_eq!(
521 buffer.deferred_ops_len(),
522 0,
523 "replica {}, buffer {} has deferred operations",
524 replica_id,
525 buffer.remote_id()
526 );
527 }
528 }
529 }
530 }
531
532 #[cfg(any(test, feature = "test-support"))]
533 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
534 let path = path.into();
535 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
536 self.opened_buffers.iter().any(|(_, buffer)| {
537 if let Some(buffer) = buffer.upgrade(cx) {
538 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
539 if file.worktree == worktree && file.path() == &path.path {
540 return true;
541 }
542 }
543 }
544 false
545 })
546 } else {
547 false
548 }
549 }
550
551 pub fn fs(&self) -> &Arc<dyn Fs> {
552 &self.fs
553 }
554
555 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
556 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
557 *remote_id_tx.borrow_mut() = remote_id;
558 }
559
560 self.subscriptions.clear();
561 if let Some(remote_id) = remote_id {
562 self.subscriptions
563 .push(self.client.add_model_for_remote_entity(remote_id, cx));
564 }
565 }
566
567 pub fn remote_id(&self) -> Option<u64> {
568 match &self.client_state {
569 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
570 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
571 }
572 }
573
574 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
575 let mut id = None;
576 let mut watch = None;
577 match &self.client_state {
578 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
579 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
580 }
581
582 async move {
583 if let Some(id) = id {
584 return id;
585 }
586 let mut watch = watch.unwrap();
587 loop {
588 let id = *watch.borrow();
589 if let Some(id) = id {
590 return id;
591 }
592 watch.next().await;
593 }
594 }
595 }
596
597 pub fn replica_id(&self) -> ReplicaId {
598 match &self.client_state {
599 ProjectClientState::Local { .. } => 0,
600 ProjectClientState::Remote { replica_id, .. } => *replica_id,
601 }
602 }
603
604 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
605 &self.collaborators
606 }
607
608 pub fn worktrees<'a>(
609 &'a self,
610 cx: &'a AppContext,
611 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
612 self.worktrees
613 .iter()
614 .filter_map(move |worktree| worktree.upgrade(cx))
615 }
616
617 pub fn visible_worktrees<'a>(
618 &'a self,
619 cx: &'a AppContext,
620 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
621 self.worktrees.iter().filter_map(|worktree| {
622 worktree.upgrade(cx).and_then(|worktree| {
623 if worktree.read(cx).is_visible() {
624 Some(worktree)
625 } else {
626 None
627 }
628 })
629 })
630 }
631
632 pub fn worktree_for_id(
633 &self,
634 id: WorktreeId,
635 cx: &AppContext,
636 ) -> Option<ModelHandle<Worktree>> {
637 self.worktrees(cx)
638 .find(|worktree| worktree.read(cx).id() == id)
639 }
640
641 pub fn worktree_for_entry(
642 &self,
643 entry_id: ProjectEntryId,
644 cx: &AppContext,
645 ) -> Option<ModelHandle<Worktree>> {
646 self.worktrees(cx)
647 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
648 }
649
650 pub fn worktree_id_for_entry(
651 &self,
652 entry_id: ProjectEntryId,
653 cx: &AppContext,
654 ) -> Option<WorktreeId> {
655 self.worktree_for_entry(entry_id, cx)
656 .map(|worktree| worktree.read(cx).id())
657 }
658
659 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
660 let rpc = self.client.clone();
661 cx.spawn(|this, mut cx| async move {
662 let project_id = this.update(&mut cx, |this, cx| {
663 if let ProjectClientState::Local {
664 is_shared,
665 remote_id_rx,
666 ..
667 } = &mut this.client_state
668 {
669 *is_shared = true;
670
671 for open_buffer in this.opened_buffers.values_mut() {
672 match open_buffer {
673 OpenBuffer::Strong(_) => {}
674 OpenBuffer::Weak(buffer) => {
675 if let Some(buffer) = buffer.upgrade(cx) {
676 *open_buffer = OpenBuffer::Strong(buffer);
677 }
678 }
679 OpenBuffer::Loading(_) => unreachable!(),
680 }
681 }
682
683 for worktree_handle in this.worktrees.iter_mut() {
684 match worktree_handle {
685 WorktreeHandle::Strong(_) => {}
686 WorktreeHandle::Weak(worktree) => {
687 if let Some(worktree) = worktree.upgrade(cx) {
688 *worktree_handle = WorktreeHandle::Strong(worktree);
689 }
690 }
691 }
692 }
693
694 remote_id_rx
695 .borrow()
696 .ok_or_else(|| anyhow!("no project id"))
697 } else {
698 Err(anyhow!("can't share a remote project"))
699 }
700 })?;
701
702 rpc.request(proto::ShareProject { project_id }).await?;
703
704 let mut tasks = Vec::new();
705 this.update(&mut cx, |this, cx| {
706 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
707 worktree.update(cx, |worktree, cx| {
708 let worktree = worktree.as_local_mut().unwrap();
709 tasks.push(worktree.share(project_id, cx));
710 });
711 }
712 });
713 for task in tasks {
714 task.await?;
715 }
716 this.update(&mut cx, |_, cx| cx.notify());
717 Ok(())
718 })
719 }
720
721 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
722 let rpc = self.client.clone();
723 cx.spawn(|this, mut cx| async move {
724 let project_id = this.update(&mut cx, |this, cx| {
725 if let ProjectClientState::Local {
726 is_shared,
727 remote_id_rx,
728 ..
729 } = &mut this.client_state
730 {
731 *is_shared = false;
732
733 for open_buffer in this.opened_buffers.values_mut() {
734 match open_buffer {
735 OpenBuffer::Strong(buffer) => {
736 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
737 }
738 _ => {}
739 }
740 }
741
742 for worktree_handle in this.worktrees.iter_mut() {
743 match worktree_handle {
744 WorktreeHandle::Strong(worktree) => {
745 if !worktree.read(cx).is_visible() {
746 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
747 }
748 }
749 _ => {}
750 }
751 }
752
753 remote_id_rx
754 .borrow()
755 .ok_or_else(|| anyhow!("no project id"))
756 } else {
757 Err(anyhow!("can't share a remote project"))
758 }
759 })?;
760
761 rpc.send(proto::UnshareProject { project_id })?;
762 this.update(&mut cx, |this, cx| {
763 this.collaborators.clear();
764 this.shared_buffers.clear();
765 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
766 worktree.update(cx, |worktree, _| {
767 worktree.as_local_mut().unwrap().unshare();
768 });
769 }
770 cx.notify()
771 });
772 Ok(())
773 })
774 }
775
776 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
777 if let ProjectClientState::Remote {
778 sharing_has_stopped,
779 ..
780 } = &mut self.client_state
781 {
782 *sharing_has_stopped = true;
783 self.collaborators.clear();
784 cx.notify();
785 }
786 }
787
788 pub fn is_read_only(&self) -> bool {
789 match &self.client_state {
790 ProjectClientState::Local { .. } => false,
791 ProjectClientState::Remote {
792 sharing_has_stopped,
793 ..
794 } => *sharing_has_stopped,
795 }
796 }
797
798 pub fn is_local(&self) -> bool {
799 match &self.client_state {
800 ProjectClientState::Local { .. } => true,
801 ProjectClientState::Remote { .. } => false,
802 }
803 }
804
805 pub fn is_remote(&self) -> bool {
806 !self.is_local()
807 }
808
809 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
810 if self.is_remote() {
811 return Err(anyhow!("creating buffers as a guest is not supported yet"));
812 }
813
814 let buffer = cx.add_model(|cx| {
815 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
816 });
817 self.register_buffer(&buffer, cx)?;
818 Ok(buffer)
819 }
820
821 pub fn open_buffer_for_entry(
822 &mut self,
823 entry_id: ProjectEntryId,
824 cx: &mut ModelContext<Self>,
825 ) -> Task<Result<ModelHandle<Buffer>>> {
826 if let Some(project_path) = self.path_for_entry(entry_id, cx) {
827 self.open_buffer_for_path(project_path, cx)
828 } else {
829 Task::ready(Err(anyhow!("entry not found")))
830 }
831 }
832
833 pub fn open_buffer_for_path(
834 &mut self,
835 path: impl Into<ProjectPath>,
836 cx: &mut ModelContext<Self>,
837 ) -> Task<Result<ModelHandle<Buffer>>> {
838 let project_path = path.into();
839 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
840 worktree
841 } else {
842 return Task::ready(Err(anyhow!("no such worktree")));
843 };
844
845 // If there is already a buffer for the given path, then return it.
846 let existing_buffer = self.get_open_buffer(&project_path, cx);
847 if let Some(existing_buffer) = existing_buffer {
848 return Task::ready(Ok(existing_buffer));
849 }
850
851 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
852 // If the given path is already being loaded, then wait for that existing
853 // task to complete and return the same buffer.
854 hash_map::Entry::Occupied(e) => e.get().clone(),
855
856 // Otherwise, record the fact that this path is now being loaded.
857 hash_map::Entry::Vacant(entry) => {
858 let (mut tx, rx) = postage::watch::channel();
859 entry.insert(rx.clone());
860
861 let load_buffer = if worktree.read(cx).is_local() {
862 self.open_local_buffer(&project_path.path, &worktree, cx)
863 } else {
864 self.open_remote_buffer(&project_path.path, &worktree, cx)
865 };
866
867 cx.spawn(move |this, mut cx| async move {
868 let load_result = load_buffer.await;
869 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
870 // Record the fact that the buffer is no longer loading.
871 this.loading_buffers.remove(&project_path);
872 let buffer = load_result.map_err(Arc::new)?;
873 Ok(buffer)
874 }));
875 })
876 .detach();
877 rx
878 }
879 };
880
881 cx.foreground().spawn(async move {
882 loop {
883 if let Some(result) = loading_watch.borrow().as_ref() {
884 match result {
885 Ok(buffer) => return Ok(buffer.clone()),
886 Err(error) => return Err(anyhow!("{}", error)),
887 }
888 }
889 loading_watch.next().await;
890 }
891 })
892 }
893
894 fn open_local_buffer(
895 &mut self,
896 path: &Arc<Path>,
897 worktree: &ModelHandle<Worktree>,
898 cx: &mut ModelContext<Self>,
899 ) -> Task<Result<ModelHandle<Buffer>>> {
900 let load_buffer = worktree.update(cx, |worktree, cx| {
901 let worktree = worktree.as_local_mut().unwrap();
902 worktree.load_buffer(path, cx)
903 });
904 cx.spawn(|this, mut cx| async move {
905 let buffer = load_buffer.await?;
906 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
907 Ok(buffer)
908 })
909 }
910
911 fn open_remote_buffer(
912 &mut self,
913 path: &Arc<Path>,
914 worktree: &ModelHandle<Worktree>,
915 cx: &mut ModelContext<Self>,
916 ) -> Task<Result<ModelHandle<Buffer>>> {
917 let rpc = self.client.clone();
918 let project_id = self.remote_id().unwrap();
919 let remote_worktree_id = worktree.read(cx).id();
920 let path = path.clone();
921 let path_string = path.to_string_lossy().to_string();
922 cx.spawn(|this, mut cx| async move {
923 let response = rpc
924 .request(proto::OpenBuffer {
925 project_id,
926 worktree_id: remote_worktree_id.to_proto(),
927 path: path_string,
928 })
929 .await?;
930 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
931 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
932 .await
933 })
934 }
935
936 fn open_local_buffer_via_lsp(
937 &mut self,
938 abs_path: lsp::Url,
939 lang_name: Arc<str>,
940 lang_server: Arc<LanguageServer>,
941 cx: &mut ModelContext<Self>,
942 ) -> Task<Result<ModelHandle<Buffer>>> {
943 cx.spawn(|this, mut cx| async move {
944 let abs_path = abs_path
945 .to_file_path()
946 .map_err(|_| anyhow!("can't convert URI to path"))?;
947 let (worktree, relative_path) = if let Some(result) =
948 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
949 {
950 result
951 } else {
952 let worktree = this
953 .update(&mut cx, |this, cx| {
954 this.create_local_worktree(&abs_path, false, cx)
955 })
956 .await?;
957 this.update(&mut cx, |this, cx| {
958 this.language_servers
959 .insert((worktree.read(cx).id(), lang_name), lang_server);
960 });
961 (worktree, PathBuf::new())
962 };
963
964 let project_path = ProjectPath {
965 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
966 path: relative_path.into(),
967 };
968 this.update(&mut cx, |this, cx| {
969 this.open_buffer_for_path(project_path, cx)
970 })
971 .await
972 })
973 }
974
975 pub fn save_buffer_as(
976 &mut self,
977 buffer: ModelHandle<Buffer>,
978 abs_path: PathBuf,
979 cx: &mut ModelContext<Project>,
980 ) -> Task<Result<()>> {
981 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
982 cx.spawn(|this, mut cx| async move {
983 let (worktree, path) = worktree_task.await?;
984 worktree
985 .update(&mut cx, |worktree, cx| {
986 worktree
987 .as_local_mut()
988 .unwrap()
989 .save_buffer_as(buffer.clone(), path, cx)
990 })
991 .await?;
992 this.update(&mut cx, |this, cx| {
993 this.assign_language_to_buffer(&buffer, cx);
994 this.register_buffer_with_language_server(&buffer, cx);
995 });
996 Ok(())
997 })
998 }
999
1000 pub fn get_open_buffer(
1001 &mut self,
1002 path: &ProjectPath,
1003 cx: &mut ModelContext<Self>,
1004 ) -> Option<ModelHandle<Buffer>> {
1005 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1006 self.opened_buffers.values().find_map(|buffer| {
1007 let buffer = buffer.upgrade(cx)?;
1008 let file = File::from_dyn(buffer.read(cx).file())?;
1009 if file.worktree == worktree && file.path() == &path.path {
1010 Some(buffer)
1011 } else {
1012 None
1013 }
1014 })
1015 }
1016
1017 fn register_buffer(
1018 &mut self,
1019 buffer: &ModelHandle<Buffer>,
1020 cx: &mut ModelContext<Self>,
1021 ) -> Result<()> {
1022 let remote_id = buffer.read(cx).remote_id();
1023 let open_buffer = if self.is_remote() || self.is_shared() {
1024 OpenBuffer::Strong(buffer.clone())
1025 } else {
1026 OpenBuffer::Weak(buffer.downgrade())
1027 };
1028
1029 match self.opened_buffers.insert(remote_id, open_buffer) {
1030 None => {}
1031 Some(OpenBuffer::Loading(operations)) => {
1032 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1033 }
1034 Some(OpenBuffer::Weak(existing_handle)) => {
1035 if existing_handle.upgrade(cx).is_some() {
1036 Err(anyhow!(
1037 "already registered buffer with remote id {}",
1038 remote_id
1039 ))?
1040 }
1041 }
1042 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1043 "already registered buffer with remote id {}",
1044 remote_id
1045 ))?,
1046 }
1047 cx.subscribe(buffer, |this, buffer, event, cx| {
1048 this.on_buffer_event(buffer, event, cx);
1049 })
1050 .detach();
1051
1052 self.assign_language_to_buffer(buffer, cx);
1053 self.register_buffer_with_language_server(buffer, cx);
1054
1055 Ok(())
1056 }
1057
1058 fn register_buffer_with_language_server(
1059 &mut self,
1060 buffer_handle: &ModelHandle<Buffer>,
1061 cx: &mut ModelContext<Self>,
1062 ) {
1063 let buffer = buffer_handle.read(cx);
1064 let buffer_id = buffer.remote_id();
1065 if let Some(file) = File::from_dyn(buffer.file()) {
1066 if file.is_local() {
1067 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1068 let initial_snapshot = buffer.text_snapshot();
1069 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1070
1071 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1072 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1073 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1074 .log_err();
1075 }
1076 }
1077
1078 if let Some(server) = language_server {
1079 server
1080 .notify::<lsp::notification::DidOpenTextDocument>(
1081 lsp::DidOpenTextDocumentParams {
1082 text_document: lsp::TextDocumentItem::new(
1083 uri,
1084 Default::default(),
1085 0,
1086 initial_snapshot.text(),
1087 ),
1088 }
1089 .clone(),
1090 )
1091 .log_err();
1092 buffer_handle.update(cx, |buffer, cx| {
1093 buffer.set_completion_triggers(
1094 server
1095 .capabilities()
1096 .completion_provider
1097 .as_ref()
1098 .and_then(|provider| provider.trigger_characters.clone())
1099 .unwrap_or(Vec::new()),
1100 cx,
1101 )
1102 });
1103 self.buffer_snapshots
1104 .insert(buffer_id, vec![(0, initial_snapshot)]);
1105 }
1106
1107 cx.observe_release(buffer_handle, |this, buffer, cx| {
1108 if let Some(file) = File::from_dyn(buffer.file()) {
1109 if file.is_local() {
1110 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1111 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1112 server
1113 .notify::<lsp::notification::DidCloseTextDocument>(
1114 lsp::DidCloseTextDocumentParams {
1115 text_document: lsp::TextDocumentIdentifier::new(
1116 uri.clone(),
1117 ),
1118 },
1119 )
1120 .log_err();
1121 }
1122 }
1123 }
1124 })
1125 .detach();
1126 }
1127 }
1128 }
1129
1130 fn on_buffer_event(
1131 &mut self,
1132 buffer: ModelHandle<Buffer>,
1133 event: &BufferEvent,
1134 cx: &mut ModelContext<Self>,
1135 ) -> Option<()> {
1136 match event {
1137 BufferEvent::Operation(operation) => {
1138 let project_id = self.remote_id()?;
1139 let request = self.client.request(proto::UpdateBuffer {
1140 project_id,
1141 buffer_id: buffer.read(cx).remote_id(),
1142 operations: vec![language::proto::serialize_operation(&operation)],
1143 });
1144 cx.background().spawn(request).detach_and_log_err(cx);
1145 }
1146 BufferEvent::Edited => {
1147 let language_server = self
1148 .language_server_for_buffer(buffer.read(cx), cx)?
1149 .clone();
1150 let buffer = buffer.read(cx);
1151 let file = File::from_dyn(buffer.file())?;
1152 let abs_path = file.as_local()?.abs_path(cx);
1153 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1154 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1155 let (version, prev_snapshot) = buffer_snapshots.last()?;
1156 let next_snapshot = buffer.text_snapshot();
1157 let next_version = version + 1;
1158
1159 let content_changes = buffer
1160 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1161 .map(|edit| {
1162 let edit_start = edit.new.start.0;
1163 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1164 let new_text = next_snapshot
1165 .text_for_range(edit.new.start.1..edit.new.end.1)
1166 .collect();
1167 lsp::TextDocumentContentChangeEvent {
1168 range: Some(lsp::Range::new(
1169 edit_start.to_lsp_position(),
1170 edit_end.to_lsp_position(),
1171 )),
1172 range_length: None,
1173 text: new_text,
1174 }
1175 })
1176 .collect();
1177
1178 buffer_snapshots.push((next_version, next_snapshot));
1179
1180 language_server
1181 .notify::<lsp::notification::DidChangeTextDocument>(
1182 lsp::DidChangeTextDocumentParams {
1183 text_document: lsp::VersionedTextDocumentIdentifier::new(
1184 uri,
1185 next_version,
1186 ),
1187 content_changes,
1188 },
1189 )
1190 .log_err();
1191 }
1192 BufferEvent::Saved => {
1193 let file = File::from_dyn(buffer.read(cx).file())?;
1194 let worktree_id = file.worktree_id(cx);
1195 let abs_path = file.as_local()?.abs_path(cx);
1196 let text_document = lsp::TextDocumentIdentifier {
1197 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1198 };
1199
1200 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1201 server
1202 .notify::<lsp::notification::DidSaveTextDocument>(
1203 lsp::DidSaveTextDocumentParams {
1204 text_document: text_document.clone(),
1205 text: None,
1206 },
1207 )
1208 .log_err();
1209 }
1210 }
1211 _ => {}
1212 }
1213
1214 None
1215 }
1216
1217 fn language_servers_for_worktree(
1218 &self,
1219 worktree_id: WorktreeId,
1220 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1221 self.language_servers.iter().filter_map(
1222 move |((language_server_worktree_id, language_name), server)| {
1223 if *language_server_worktree_id == worktree_id {
1224 Some((language_name.as_ref(), server))
1225 } else {
1226 None
1227 }
1228 },
1229 )
1230 }
1231
1232 fn assign_language_to_buffer(
1233 &mut self,
1234 buffer: &ModelHandle<Buffer>,
1235 cx: &mut ModelContext<Self>,
1236 ) -> Option<()> {
1237 // If the buffer has a language, set it and start the language server if we haven't already.
1238 let full_path = buffer.read(cx).file()?.full_path(cx);
1239 let language = self.languages.select_language(&full_path)?;
1240 buffer.update(cx, |buffer, cx| {
1241 buffer.set_language(Some(language.clone()), cx);
1242 });
1243
1244 let file = File::from_dyn(buffer.read(cx).file())?;
1245 let worktree = file.worktree.read(cx).as_local()?;
1246 let worktree_id = worktree.id();
1247 let worktree_abs_path = worktree.abs_path().clone();
1248 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1249
1250 None
1251 }
1252
1253 fn start_language_server(
1254 &mut self,
1255 worktree_id: WorktreeId,
1256 worktree_path: Arc<Path>,
1257 language: Arc<Language>,
1258 cx: &mut ModelContext<Self>,
1259 ) {
1260 let key = (worktree_id, language.name());
1261 self.started_language_servers
1262 .entry(key.clone())
1263 .or_insert_with(|| {
1264 let server_id = post_inc(&mut self.next_language_server_id);
1265 let language_server = self.languages.start_language_server(
1266 language.clone(),
1267 worktree_path,
1268 self.client.http_client(),
1269 cx,
1270 );
1271 cx.spawn_weak(|this, mut cx| async move {
1272 let mut language_server = language_server?.await.log_err()?;
1273 let this = this.upgrade(&cx)?;
1274 let (language_server_events_tx, language_server_events_rx) =
1275 smol::channel::unbounded();
1276
1277 language_server
1278 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1279 let language_server_events_tx = language_server_events_tx.clone();
1280 move |params| {
1281 language_server_events_tx
1282 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1283 .ok();
1284 }
1285 })
1286 .detach();
1287
1288 language_server
1289 .on_request::<lsp::request::WorkspaceConfiguration, _>({
1290 let settings = this
1291 .read_with(&cx, |this, _| this.language_server_settings.clone());
1292 move |params| {
1293 let settings = settings.lock();
1294 Ok(params
1295 .items
1296 .into_iter()
1297 .map(|item| {
1298 if let Some(section) = &item.section {
1299 settings
1300 .get(section)
1301 .cloned()
1302 .unwrap_or(serde_json::Value::Null)
1303 } else {
1304 settings.clone()
1305 }
1306 })
1307 .collect())
1308 }
1309 })
1310 .detach();
1311
1312 language_server
1313 .on_notification::<lsp::notification::Progress, _>(move |params| {
1314 let token = match params.token {
1315 lsp::NumberOrString::String(token) => token,
1316 lsp::NumberOrString::Number(token) => {
1317 log::info!("skipping numeric progress token {}", token);
1318 return;
1319 }
1320 };
1321
1322 match params.value {
1323 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1324 lsp::WorkDoneProgress::Begin(_) => {
1325 language_server_events_tx
1326 .try_send(LanguageServerEvent::WorkStart { token })
1327 .ok();
1328 }
1329 lsp::WorkDoneProgress::Report(report) => {
1330 language_server_events_tx
1331 .try_send(LanguageServerEvent::WorkProgress {
1332 token,
1333 progress: LanguageServerProgress {
1334 message: report.message,
1335 percentage: report
1336 .percentage
1337 .map(|p| p as usize),
1338 last_update_at: Instant::now(),
1339 },
1340 })
1341 .ok();
1342 }
1343 lsp::WorkDoneProgress::End(_) => {
1344 language_server_events_tx
1345 .try_send(LanguageServerEvent::WorkEnd { token })
1346 .ok();
1347 }
1348 },
1349 }
1350 })
1351 .detach();
1352
1353 // Process all the LSP events.
1354 cx.spawn(|mut cx| {
1355 let this = this.downgrade();
1356 async move {
1357 while let Ok(event) = language_server_events_rx.recv().await {
1358 let this = this.upgrade(&cx)?;
1359 this.update(&mut cx, |this, cx| {
1360 this.on_lsp_event(server_id, event, &language, cx)
1361 });
1362
1363 // Don't starve the main thread when lots of events arrive all at once.
1364 smol::future::yield_now().await;
1365 }
1366 Some(())
1367 }
1368 })
1369 .detach();
1370
1371 let language_server = language_server.initialize().await.log_err()?;
1372 this.update(&mut cx, |this, cx| {
1373 this.language_servers
1374 .insert(key.clone(), language_server.clone());
1375 this.language_server_statuses.insert(
1376 server_id,
1377 LanguageServerStatus {
1378 name: language_server.name().to_string(),
1379 pending_work: Default::default(),
1380 pending_diagnostic_updates: 0,
1381 },
1382 );
1383 language_server
1384 .notify::<lsp::notification::DidChangeConfiguration>(
1385 lsp::DidChangeConfigurationParams {
1386 settings: this.language_server_settings.lock().clone(),
1387 },
1388 )
1389 .ok();
1390
1391 if let Some(project_id) = this.remote_id() {
1392 this.client
1393 .send(proto::StartLanguageServer {
1394 project_id,
1395 server: Some(proto::LanguageServer {
1396 id: server_id as u64,
1397 name: language_server.name().to_string(),
1398 }),
1399 })
1400 .log_err();
1401 }
1402
1403 // Tell the language server about every open buffer in the worktree that matches the language.
1404 for buffer in this.opened_buffers.values() {
1405 if let Some(buffer_handle) = buffer.upgrade(cx) {
1406 let buffer = buffer_handle.read(cx);
1407 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1408 file
1409 } else {
1410 continue;
1411 };
1412 let language = if let Some(language) = buffer.language() {
1413 language
1414 } else {
1415 continue;
1416 };
1417 if (file.worktree.read(cx).id(), language.name()) != key {
1418 continue;
1419 }
1420
1421 let file = file.as_local()?;
1422 let versions = this
1423 .buffer_snapshots
1424 .entry(buffer.remote_id())
1425 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1426 let (version, initial_snapshot) = versions.last().unwrap();
1427 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1428 language_server
1429 .notify::<lsp::notification::DidOpenTextDocument>(
1430 lsp::DidOpenTextDocumentParams {
1431 text_document: lsp::TextDocumentItem::new(
1432 uri,
1433 Default::default(),
1434 *version,
1435 initial_snapshot.text(),
1436 ),
1437 },
1438 )
1439 .log_err()?;
1440 buffer_handle.update(cx, |buffer, cx| {
1441 buffer.set_completion_triggers(
1442 language_server
1443 .capabilities()
1444 .completion_provider
1445 .as_ref()
1446 .and_then(|provider| {
1447 provider.trigger_characters.clone()
1448 })
1449 .unwrap_or(Vec::new()),
1450 cx,
1451 )
1452 });
1453 }
1454 }
1455
1456 cx.notify();
1457 Some(())
1458 });
1459
1460 Some(language_server)
1461 })
1462 });
1463 }
1464
1465 fn on_lsp_event(
1466 &mut self,
1467 language_server_id: usize,
1468 event: LanguageServerEvent,
1469 language: &Arc<Language>,
1470 cx: &mut ModelContext<Self>,
1471 ) {
1472 let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
1473 let language_server_status =
1474 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1475 status
1476 } else {
1477 return;
1478 };
1479
1480 match event {
1481 LanguageServerEvent::WorkStart { token } => {
1482 if Some(&token) == disk_diagnostics_token {
1483 language_server_status.pending_diagnostic_updates += 1;
1484 if language_server_status.pending_diagnostic_updates == 1 {
1485 self.disk_based_diagnostics_started(cx);
1486 self.broadcast_language_server_update(
1487 language_server_id,
1488 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1489 proto::LspDiskBasedDiagnosticsUpdating {},
1490 ),
1491 );
1492 }
1493 } else {
1494 self.on_lsp_work_start(language_server_id, token.clone(), cx);
1495 self.broadcast_language_server_update(
1496 language_server_id,
1497 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1498 token,
1499 }),
1500 );
1501 }
1502 }
1503 LanguageServerEvent::WorkProgress { token, progress } => {
1504 if Some(&token) != disk_diagnostics_token {
1505 self.on_lsp_work_progress(
1506 language_server_id,
1507 token.clone(),
1508 progress.clone(),
1509 cx,
1510 );
1511 self.broadcast_language_server_update(
1512 language_server_id,
1513 proto::update_language_server::Variant::WorkProgress(
1514 proto::LspWorkProgress {
1515 token,
1516 message: progress.message,
1517 percentage: progress.percentage.map(|p| p as u32),
1518 },
1519 ),
1520 );
1521 }
1522 }
1523 LanguageServerEvent::WorkEnd { token } => {
1524 if Some(&token) == disk_diagnostics_token {
1525 language_server_status.pending_diagnostic_updates -= 1;
1526 if language_server_status.pending_diagnostic_updates == 0 {
1527 self.disk_based_diagnostics_finished(cx);
1528 self.broadcast_language_server_update(
1529 language_server_id,
1530 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1531 proto::LspDiskBasedDiagnosticsUpdated {},
1532 ),
1533 );
1534 }
1535 } else {
1536 self.on_lsp_work_end(language_server_id, token.clone(), cx);
1537 self.broadcast_language_server_update(
1538 language_server_id,
1539 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1540 token,
1541 }),
1542 );
1543 }
1544 }
1545 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1546 language.process_diagnostics(&mut params);
1547
1548 if disk_diagnostics_token.is_none() {
1549 self.disk_based_diagnostics_started(cx);
1550 self.broadcast_language_server_update(
1551 language_server_id,
1552 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1553 proto::LspDiskBasedDiagnosticsUpdating {},
1554 ),
1555 );
1556 }
1557 self.update_diagnostics(
1558 params,
1559 language
1560 .disk_based_diagnostic_sources()
1561 .unwrap_or(&Default::default()),
1562 cx,
1563 )
1564 .log_err();
1565 if disk_diagnostics_token.is_none() {
1566 self.disk_based_diagnostics_finished(cx);
1567 self.broadcast_language_server_update(
1568 language_server_id,
1569 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1570 proto::LspDiskBasedDiagnosticsUpdated {},
1571 ),
1572 );
1573 }
1574 }
1575 }
1576 }
1577
1578 fn on_lsp_work_start(
1579 &mut self,
1580 language_server_id: usize,
1581 token: String,
1582 cx: &mut ModelContext<Self>,
1583 ) {
1584 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1585 status.pending_work.insert(
1586 token,
1587 LanguageServerProgress {
1588 message: None,
1589 percentage: None,
1590 last_update_at: Instant::now(),
1591 },
1592 );
1593 cx.notify();
1594 }
1595 }
1596
1597 fn on_lsp_work_progress(
1598 &mut self,
1599 language_server_id: usize,
1600 token: String,
1601 progress: LanguageServerProgress,
1602 cx: &mut ModelContext<Self>,
1603 ) {
1604 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1605 status.pending_work.insert(token, progress);
1606 cx.notify();
1607 }
1608 }
1609
1610 fn on_lsp_work_end(
1611 &mut self,
1612 language_server_id: usize,
1613 token: String,
1614 cx: &mut ModelContext<Self>,
1615 ) {
1616 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1617 status.pending_work.remove(&token);
1618 cx.notify();
1619 }
1620 }
1621
1622 fn broadcast_language_server_update(
1623 &self,
1624 language_server_id: usize,
1625 event: proto::update_language_server::Variant,
1626 ) {
1627 if let Some(project_id) = self.remote_id() {
1628 self.client
1629 .send(proto::UpdateLanguageServer {
1630 project_id,
1631 language_server_id: language_server_id as u64,
1632 variant: Some(event),
1633 })
1634 .log_err();
1635 }
1636 }
1637
1638 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1639 for server in self.language_servers.values() {
1640 server
1641 .notify::<lsp::notification::DidChangeConfiguration>(
1642 lsp::DidChangeConfigurationParams {
1643 settings: settings.clone(),
1644 },
1645 )
1646 .ok();
1647 }
1648 *self.language_server_settings.lock() = settings;
1649 }
1650
1651 pub fn language_server_statuses(
1652 &self,
1653 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1654 self.language_server_statuses.values()
1655 }
1656
1657 pub fn update_diagnostics(
1658 &mut self,
1659 params: lsp::PublishDiagnosticsParams,
1660 disk_based_sources: &HashSet<String>,
1661 cx: &mut ModelContext<Self>,
1662 ) -> Result<()> {
1663 let abs_path = params
1664 .uri
1665 .to_file_path()
1666 .map_err(|_| anyhow!("URI is not a file"))?;
1667 let mut next_group_id = 0;
1668 let mut diagnostics = Vec::default();
1669 let mut primary_diagnostic_group_ids = HashMap::default();
1670 let mut sources_by_group_id = HashMap::default();
1671 let mut supporting_diagnostics = HashMap::default();
1672 for diagnostic in ¶ms.diagnostics {
1673 let source = diagnostic.source.as_ref();
1674 let code = diagnostic.code.as_ref().map(|code| match code {
1675 lsp::NumberOrString::Number(code) => code.to_string(),
1676 lsp::NumberOrString::String(code) => code.clone(),
1677 });
1678 let range = range_from_lsp(diagnostic.range);
1679 let is_supporting = diagnostic
1680 .related_information
1681 .as_ref()
1682 .map_or(false, |infos| {
1683 infos.iter().any(|info| {
1684 primary_diagnostic_group_ids.contains_key(&(
1685 source,
1686 code.clone(),
1687 range_from_lsp(info.location.range),
1688 ))
1689 })
1690 });
1691
1692 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1693 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1694 });
1695
1696 if is_supporting {
1697 supporting_diagnostics.insert(
1698 (source, code.clone(), range),
1699 (diagnostic.severity, is_unnecessary),
1700 );
1701 } else {
1702 let group_id = post_inc(&mut next_group_id);
1703 let is_disk_based =
1704 source.map_or(false, |source| disk_based_sources.contains(source));
1705
1706 sources_by_group_id.insert(group_id, source);
1707 primary_diagnostic_group_ids
1708 .insert((source, code.clone(), range.clone()), group_id);
1709
1710 diagnostics.push(DiagnosticEntry {
1711 range,
1712 diagnostic: Diagnostic {
1713 code: code.clone(),
1714 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1715 message: diagnostic.message.clone(),
1716 group_id,
1717 is_primary: true,
1718 is_valid: true,
1719 is_disk_based,
1720 is_unnecessary,
1721 },
1722 });
1723 if let Some(infos) = &diagnostic.related_information {
1724 for info in infos {
1725 if info.location.uri == params.uri && !info.message.is_empty() {
1726 let range = range_from_lsp(info.location.range);
1727 diagnostics.push(DiagnosticEntry {
1728 range,
1729 diagnostic: Diagnostic {
1730 code: code.clone(),
1731 severity: DiagnosticSeverity::INFORMATION,
1732 message: info.message.clone(),
1733 group_id,
1734 is_primary: false,
1735 is_valid: true,
1736 is_disk_based,
1737 is_unnecessary: false,
1738 },
1739 });
1740 }
1741 }
1742 }
1743 }
1744 }
1745
1746 for entry in &mut diagnostics {
1747 let diagnostic = &mut entry.diagnostic;
1748 if !diagnostic.is_primary {
1749 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1750 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1751 source,
1752 diagnostic.code.clone(),
1753 entry.range.clone(),
1754 )) {
1755 if let Some(severity) = severity {
1756 diagnostic.severity = severity;
1757 }
1758 diagnostic.is_unnecessary = is_unnecessary;
1759 }
1760 }
1761 }
1762
1763 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1764 Ok(())
1765 }
1766
1767 pub fn update_diagnostic_entries(
1768 &mut self,
1769 abs_path: PathBuf,
1770 version: Option<i32>,
1771 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1772 cx: &mut ModelContext<Project>,
1773 ) -> Result<(), anyhow::Error> {
1774 let (worktree, relative_path) = self
1775 .find_local_worktree(&abs_path, cx)
1776 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1777 if !worktree.read(cx).is_visible() {
1778 return Ok(());
1779 }
1780
1781 let project_path = ProjectPath {
1782 worktree_id: worktree.read(cx).id(),
1783 path: relative_path.into(),
1784 };
1785
1786 for buffer in self.opened_buffers.values() {
1787 if let Some(buffer) = buffer.upgrade(cx) {
1788 if buffer
1789 .read(cx)
1790 .file()
1791 .map_or(false, |file| *file.path() == project_path.path)
1792 {
1793 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1794 break;
1795 }
1796 }
1797 }
1798 worktree.update(cx, |worktree, cx| {
1799 worktree
1800 .as_local_mut()
1801 .ok_or_else(|| anyhow!("not a local worktree"))?
1802 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1803 })?;
1804 cx.emit(Event::DiagnosticsUpdated(project_path));
1805 Ok(())
1806 }
1807
1808 fn update_buffer_diagnostics(
1809 &mut self,
1810 buffer: &ModelHandle<Buffer>,
1811 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1812 version: Option<i32>,
1813 cx: &mut ModelContext<Self>,
1814 ) -> Result<()> {
1815 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1816 Ordering::Equal
1817 .then_with(|| b.is_primary.cmp(&a.is_primary))
1818 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1819 .then_with(|| a.severity.cmp(&b.severity))
1820 .then_with(|| a.message.cmp(&b.message))
1821 }
1822
1823 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1824
1825 diagnostics.sort_unstable_by(|a, b| {
1826 Ordering::Equal
1827 .then_with(|| a.range.start.cmp(&b.range.start))
1828 .then_with(|| b.range.end.cmp(&a.range.end))
1829 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1830 });
1831
1832 let mut sanitized_diagnostics = Vec::new();
1833 let mut edits_since_save = snapshot
1834 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1835 .peekable();
1836 let mut last_edit_old_end = PointUtf16::zero();
1837 let mut last_edit_new_end = PointUtf16::zero();
1838 'outer: for entry in diagnostics {
1839 let mut start = entry.range.start;
1840 let mut end = entry.range.end;
1841
1842 // Some diagnostics are based on files on disk instead of buffers'
1843 // current contents. Adjust these diagnostics' ranges to reflect
1844 // any unsaved edits.
1845 if entry.diagnostic.is_disk_based {
1846 while let Some(edit) = edits_since_save.peek() {
1847 if edit.old.end <= start {
1848 last_edit_old_end = edit.old.end;
1849 last_edit_new_end = edit.new.end;
1850 edits_since_save.next();
1851 } else if edit.old.start <= end && edit.old.end >= start {
1852 continue 'outer;
1853 } else {
1854 break;
1855 }
1856 }
1857
1858 let start_overshoot = start - last_edit_old_end;
1859 start = last_edit_new_end;
1860 start += start_overshoot;
1861
1862 let end_overshoot = end - last_edit_old_end;
1863 end = last_edit_new_end;
1864 end += end_overshoot;
1865 }
1866
1867 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1868 ..snapshot.clip_point_utf16(end, Bias::Right);
1869
1870 // Expand empty ranges by one character
1871 if range.start == range.end {
1872 range.end.column += 1;
1873 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1874 if range.start == range.end && range.end.column > 0 {
1875 range.start.column -= 1;
1876 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1877 }
1878 }
1879
1880 sanitized_diagnostics.push(DiagnosticEntry {
1881 range,
1882 diagnostic: entry.diagnostic,
1883 });
1884 }
1885 drop(edits_since_save);
1886
1887 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1888 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1889 Ok(())
1890 }
1891
1892 pub fn format(
1893 &self,
1894 buffers: HashSet<ModelHandle<Buffer>>,
1895 push_to_history: bool,
1896 cx: &mut ModelContext<Project>,
1897 ) -> Task<Result<ProjectTransaction>> {
1898 let mut local_buffers = Vec::new();
1899 let mut remote_buffers = None;
1900 for buffer_handle in buffers {
1901 let buffer = buffer_handle.read(cx);
1902 if let Some(file) = File::from_dyn(buffer.file()) {
1903 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1904 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1905 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1906 }
1907 } else {
1908 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1909 }
1910 } else {
1911 return Task::ready(Ok(Default::default()));
1912 }
1913 }
1914
1915 let remote_buffers = self.remote_id().zip(remote_buffers);
1916 let client = self.client.clone();
1917
1918 cx.spawn(|this, mut cx| async move {
1919 let mut project_transaction = ProjectTransaction::default();
1920
1921 if let Some((project_id, remote_buffers)) = remote_buffers {
1922 let response = client
1923 .request(proto::FormatBuffers {
1924 project_id,
1925 buffer_ids: remote_buffers
1926 .iter()
1927 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1928 .collect(),
1929 })
1930 .await?
1931 .transaction
1932 .ok_or_else(|| anyhow!("missing transaction"))?;
1933 project_transaction = this
1934 .update(&mut cx, |this, cx| {
1935 this.deserialize_project_transaction(response, push_to_history, cx)
1936 })
1937 .await?;
1938 }
1939
1940 for (buffer, buffer_abs_path, language_server) in local_buffers {
1941 let text_document = lsp::TextDocumentIdentifier::new(
1942 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1943 );
1944 let capabilities = &language_server.capabilities();
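                // Prefer whole-document formatting when the server advertises it;
                // otherwise fall back to range formatting over the entire buffer,
                // and skip servers that support neither.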
1945 let lsp_edits = if capabilities
1946 .document_formatting_provider
1947 .as_ref()
1948 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1949 {
1950 language_server
1951 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1952 text_document,
1953 options: Default::default(),
1954 work_done_progress_params: Default::default(),
1955 })
1956 .await?
1957 } else if capabilities
1958 .document_range_formatting_provider
1959 .as_ref()
1960 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1961 {
1962 let buffer_start = lsp::Position::new(0, 0);
1963 let buffer_end = buffer
1964 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1965 .to_lsp_position();
1966 language_server
1967 .request::<lsp::request::RangeFormatting>(
1968 lsp::DocumentRangeFormattingParams {
1969 text_document,
1970 range: lsp::Range::new(buffer_start, buffer_end),
1971 options: Default::default(),
1972 work_done_progress_params: Default::default(),
1973 },
1974 )
1975 .await?
1976 } else {
1977 continue;
1978 };
1979
1980 if let Some(lsp_edits) = lsp_edits {
1981 let edits = this
1982 .update(&mut cx, |this, cx| {
1983 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1984 })
1985 .await?;
1986 buffer.update(&mut cx, |buffer, cx| {
1987 buffer.finalize_last_transaction();
1988 buffer.start_transaction();
1989 for (range, text) in edits {
1990 buffer.edit([range], text, cx);
1991 }
1992 if buffer.end_transaction(cx).is_some() {
1993 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1994 if !push_to_history {
1995 buffer.forget_transaction(transaction.id);
1996 }
1997 project_transaction.0.insert(cx.handle(), transaction);
1998 }
1999 });
2000 }
2001 }
2002
2003 Ok(project_transaction)
2004 })
2005 }
2006
2007 pub fn definition<T: ToPointUtf16>(
2008 &self,
2009 buffer: &ModelHandle<Buffer>,
2010 position: T,
2011 cx: &mut ModelContext<Self>,
2012 ) -> Task<Result<Vec<Location>>> {
2013 let position = position.to_point_utf16(buffer.read(cx));
2014 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2015 }
2016
2017 pub fn references<T: ToPointUtf16>(
2018 &self,
2019 buffer: &ModelHandle<Buffer>,
2020 position: T,
2021 cx: &mut ModelContext<Self>,
2022 ) -> Task<Result<Vec<Location>>> {
2023 let position = position.to_point_utf16(buffer.read(cx));
2024 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2025 }
2026
2027 pub fn document_highlights<T: ToPointUtf16>(
2028 &self,
2029 buffer: &ModelHandle<Buffer>,
2030 position: T,
2031 cx: &mut ModelContext<Self>,
2032 ) -> Task<Result<Vec<DocumentHighlight>>> {
2033 let position = position.to_point_utf16(buffer.read(cx));
2034
2035 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2036 }
2037
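    /// Searches the project for symbols matching `query`.
    ///
    /// For local projects, a `workspace/symbol` request is sent to each running
    /// language server (deduplicated by server instance) and the results are
    /// resolved back to project-relative paths, preferring a worktree that already
    /// contains the symbol's file. For remote projects, the query is forwarded to
    /// the host via `GetProjectSymbols`.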
2038 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2039 if self.is_local() {
2040 let mut language_servers = HashMap::default();
2041 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
2042 if let Some((worktree, language)) = self
2043 .worktree_for_id(*worktree_id, cx)
2044 .and_then(|worktree| worktree.read(cx).as_local())
2045 .zip(self.languages.get_language(language_name))
2046 {
2047 language_servers
2048 .entry(Arc::as_ptr(language_server))
2049 .or_insert((
2050 language_server.clone(),
2051 *worktree_id,
2052 worktree.abs_path().clone(),
2053 language.clone(),
2054 ));
2055 }
2056 }
2057
2058 let mut requests = Vec::new();
2059 for (language_server, _, _, _) in language_servers.values() {
2060 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2061 lsp::WorkspaceSymbolParams {
2062 query: query.to_string(),
2063 ..Default::default()
2064 },
2065 ));
2066 }
2067
2068 cx.spawn_weak(|this, cx| async move {
2069 let responses = futures::future::try_join_all(requests).await?;
2070
2071 let mut symbols = Vec::new();
2072 if let Some(this) = this.upgrade(&cx) {
2073 this.read_with(&cx, |this, cx| {
2074 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2075 language_servers.into_values().zip(responses)
2076 {
2077 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2078 |lsp_symbol| {
2079 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2080 let mut worktree_id = source_worktree_id;
2081 let path;
2082 if let Some((worktree, rel_path)) =
2083 this.find_local_worktree(&abs_path, cx)
2084 {
2085 worktree_id = worktree.read(cx).id();
2086 path = rel_path;
2087 } else {
2088 path = relativize_path(&worktree_abs_path, &abs_path);
2089 }
2090
2091 let label = language
2092 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2093 .unwrap_or_else(|| {
2094 CodeLabel::plain(lsp_symbol.name.clone(), None)
2095 });
2096 let signature = this.symbol_signature(worktree_id, &path);
2097
2098 Some(Symbol {
2099 source_worktree_id,
2100 worktree_id,
2101 language_name: language.name().to_string(),
2102 name: lsp_symbol.name,
2103 kind: lsp_symbol.kind,
2104 label,
2105 path,
2106 range: range_from_lsp(lsp_symbol.location.range),
2107 signature,
2108 })
2109 },
2110 ));
2111 }
2112 })
2113 }
2114
2115 Ok(symbols)
2116 })
2117 } else if let Some(project_id) = self.remote_id() {
2118 let request = self.client.request(proto::GetProjectSymbols {
2119 project_id,
2120 query: query.to_string(),
2121 });
2122 cx.spawn_weak(|this, cx| async move {
2123 let response = request.await?;
2124 let mut symbols = Vec::new();
2125 if let Some(this) = this.upgrade(&cx) {
2126 this.read_with(&cx, |this, _| {
2127 symbols.extend(
2128 response
2129 .symbols
2130 .into_iter()
2131 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2132 );
2133 })
2134 }
2135 Ok(symbols)
2136 })
2137 } else {
2138 Task::ready(Ok(Default::default()))
2139 }
2140 }
2141
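    /// Opens the buffer containing the given symbol.
    ///
    /// Locally, the symbol's path is resolved against its worktree and opened
    /// through the language server that produced it; remotely, an
    /// `OpenBufferForSymbol` request is sent to the host.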
2142 pub fn open_buffer_for_symbol(
2143 &mut self,
2144 symbol: &Symbol,
2145 cx: &mut ModelContext<Self>,
2146 ) -> Task<Result<ModelHandle<Buffer>>> {
2147 if self.is_local() {
2148 let language_server = if let Some(server) = self.language_servers.get(&(
2149 symbol.source_worktree_id,
2150 Arc::from(symbol.language_name.as_str()),
2151 )) {
2152 server.clone()
2153 } else {
2154 return Task::ready(Err(anyhow!(
2155 "language server for worktree and language not found"
2156 )));
2157 };
2158
2159 let worktree_abs_path = if let Some(worktree_abs_path) = self
2160 .worktree_for_id(symbol.worktree_id, cx)
2161 .and_then(|worktree| worktree.read(cx).as_local())
2162 .map(|local_worktree| local_worktree.abs_path())
2163 {
2164 worktree_abs_path
2165 } else {
2166 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2167 };
2168 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2169 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2170 uri
2171 } else {
2172 return Task::ready(Err(anyhow!("invalid symbol path")));
2173 };
2174
2175 self.open_local_buffer_via_lsp(
2176 symbol_uri,
2177 Arc::from(symbol.language_name.as_str()),
2178 language_server,
2179 cx,
2180 )
2181 } else if let Some(project_id) = self.remote_id() {
2182 let request = self.client.request(proto::OpenBufferForSymbol {
2183 project_id,
2184 symbol: Some(serialize_symbol(symbol)),
2185 });
2186 cx.spawn(|this, mut cx| async move {
2187 let response = request.await?;
2188 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2189 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2190 .await
2191 })
2192 } else {
2193 Task::ready(Err(anyhow!("project does not have a remote id")))
2194 }
2195 }
2196
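    /// Requests completions at the given position.
    ///
    /// For local buffers, a `textDocument/completion` request is sent to the
    /// buffer's language server and each returned text edit is converted into an
    /// anchored [`Completion`], skipping insert-and-replace edits and edits whose
    /// ranges no longer match the buffer contents. For remote buffers, the request
    /// is forwarded to the host and the buffer waits for the host's version before
    /// deserializing the results.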
2197 pub fn completions<T: ToPointUtf16>(
2198 &self,
2199 source_buffer_handle: &ModelHandle<Buffer>,
2200 position: T,
2201 cx: &mut ModelContext<Self>,
2202 ) -> Task<Result<Vec<Completion>>> {
2203 let source_buffer_handle = source_buffer_handle.clone();
2204 let source_buffer = source_buffer_handle.read(cx);
2205 let buffer_id = source_buffer.remote_id();
2206 let language = source_buffer.language().cloned();
2207 let worktree;
2208 let buffer_abs_path;
2209 if let Some(file) = File::from_dyn(source_buffer.file()) {
2210 worktree = file.worktree.clone();
2211 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2212 } else {
2213 return Task::ready(Ok(Default::default()));
2214 };
2215
2216 let position = position.to_point_utf16(source_buffer);
2217 let anchor = source_buffer.anchor_after(position);
2218
2219 if worktree.read(cx).as_local().is_some() {
2220 let buffer_abs_path = buffer_abs_path.unwrap();
2221 let lang_server =
2222 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2223 server.clone()
2224 } else {
2225 return Task::ready(Ok(Default::default()));
2226 };
2227
2228 cx.spawn(|_, cx| async move {
2229 let completions = lang_server
2230 .request::<lsp::request::Completion>(lsp::CompletionParams {
2231 text_document_position: lsp::TextDocumentPositionParams::new(
2232 lsp::TextDocumentIdentifier::new(
2233 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2234 ),
2235 position.to_lsp_position(),
2236 ),
2237 context: Default::default(),
2238 work_done_progress_params: Default::default(),
2239 partial_result_params: Default::default(),
2240 })
2241 .await
2242 .context("lsp completion request failed")?;
2243
2244 let completions = if let Some(completions) = completions {
2245 match completions {
2246 lsp::CompletionResponse::Array(completions) => completions,
2247 lsp::CompletionResponse::List(list) => list.items,
2248 }
2249 } else {
2250 Default::default()
2251 };
2252
2253 source_buffer_handle.read_with(&cx, |this, _| {
2254 Ok(completions
2255 .into_iter()
2256 .filter_map(|lsp_completion| {
2257 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2258 lsp::CompletionTextEdit::Edit(edit) => {
2259 (range_from_lsp(edit.range), edit.new_text.clone())
2260 }
2261 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2262 log::info!("unsupported insert/replace completion");
2263 return None;
2264 }
2265 };
2266
2267 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2268 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2269 if clipped_start == old_range.start && clipped_end == old_range.end {
2270 Some(Completion {
2271 old_range: this.anchor_before(old_range.start)
2272 ..this.anchor_after(old_range.end),
2273 new_text,
2274 label: language
2275 .as_ref()
2276 .and_then(|l| l.label_for_completion(&lsp_completion))
2277 .unwrap_or_else(|| {
2278 CodeLabel::plain(
2279 lsp_completion.label.clone(),
2280 lsp_completion.filter_text.as_deref(),
2281 )
2282 }),
2283 lsp_completion,
2284 })
2285 } else {
2286 None
2287 }
2288 })
2289 .collect())
2290 })
2291 })
2292 } else if let Some(project_id) = self.remote_id() {
2293 let rpc = self.client.clone();
2294 let message = proto::GetCompletions {
2295 project_id,
2296 buffer_id,
2297 position: Some(language::proto::serialize_anchor(&anchor)),
2298 version: serialize_version(&source_buffer.version()),
2299 };
2300 cx.spawn_weak(|_, mut cx| async move {
2301 let response = rpc.request(message).await?;
2302
2303 source_buffer_handle
2304 .update(&mut cx, |buffer, _| {
2305 buffer.wait_for_version(deserialize_version(response.version))
2306 })
2307 .await;
2308
2309 response
2310 .completions
2311 .into_iter()
2312 .map(|completion| {
2313 language::proto::deserialize_completion(completion, language.as_ref())
2314 })
2315 .collect()
2316 })
2317 } else {
2318 Task::ready(Ok(Default::default()))
2319 }
2320 }
2321
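    /// Resolves the given completion with the language server and applies its
    /// `additionalTextEdits`, if any, returning the resulting transaction. The
    /// transaction is dropped from the buffer's undo history unless
    /// `push_to_history` is true.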
2322 pub fn apply_additional_edits_for_completion(
2323 &self,
2324 buffer_handle: ModelHandle<Buffer>,
2325 completion: Completion,
2326 push_to_history: bool,
2327 cx: &mut ModelContext<Self>,
2328 ) -> Task<Result<Option<Transaction>>> {
2329 let buffer = buffer_handle.read(cx);
2330 let buffer_id = buffer.remote_id();
2331
2332 if self.is_local() {
2333 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2334 server.clone()
2335 } else {
2336 return Task::ready(Ok(Default::default()));
2337 };
2338
2339 cx.spawn(|this, mut cx| async move {
2340 let resolved_completion = lang_server
2341 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2342 .await?;
2343 if let Some(edits) = resolved_completion.additional_text_edits {
2344 let edits = this
2345 .update(&mut cx, |this, cx| {
2346 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2347 })
2348 .await?;
2349 buffer_handle.update(&mut cx, |buffer, cx| {
2350 buffer.finalize_last_transaction();
2351 buffer.start_transaction();
2352 for (range, text) in edits {
2353 buffer.edit([range], text, cx);
2354 }
2355 let transaction = if buffer.end_transaction(cx).is_some() {
2356 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2357 if !push_to_history {
2358 buffer.forget_transaction(transaction.id);
2359 }
2360 Some(transaction)
2361 } else {
2362 None
2363 };
2364 Ok(transaction)
2365 })
2366 } else {
2367 Ok(None)
2368 }
2369 })
2370 } else if let Some(project_id) = self.remote_id() {
2371 let client = self.client.clone();
2372 cx.spawn(|_, mut cx| async move {
2373 let response = client
2374 .request(proto::ApplyCompletionAdditionalEdits {
2375 project_id,
2376 buffer_id,
2377 completion: Some(language::proto::serialize_completion(&completion)),
2378 })
2379 .await?;
2380
2381 if let Some(transaction) = response.transaction {
2382 let transaction = language::proto::deserialize_transaction(transaction)?;
2383 buffer_handle
2384 .update(&mut cx, |buffer, _| {
2385 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2386 })
2387 .await;
2388 if push_to_history {
2389 buffer_handle.update(&mut cx, |buffer, _| {
2390 buffer.push_transaction(transaction.clone(), Instant::now());
2391 });
2392 }
2393 Ok(Some(transaction))
2394 } else {
2395 Ok(None)
2396 }
2397 })
2398 } else {
2399 Task::ready(Err(anyhow!("project does not have a remote id")))
2400 }
2401 }
2402
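    /// Fetches the code actions (quickfixes and refactorings) available for the
    /// given range, either from the buffer's language server or from the remote
    /// host via `GetCodeActions`.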
2403 pub fn code_actions<T: ToOffset>(
2404 &self,
2405 buffer_handle: &ModelHandle<Buffer>,
2406 range: Range<T>,
2407 cx: &mut ModelContext<Self>,
2408 ) -> Task<Result<Vec<CodeAction>>> {
2409 let buffer_handle = buffer_handle.clone();
2410 let buffer = buffer_handle.read(cx);
2411 let buffer_id = buffer.remote_id();
2412 let worktree;
2413 let buffer_abs_path;
2414 if let Some(file) = File::from_dyn(buffer.file()) {
2415 worktree = file.worktree.clone();
2416 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2417 } else {
2418 return Task::ready(Ok(Default::default()));
2419 };
2420 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2421
2422 if worktree.read(cx).as_local().is_some() {
2423 let buffer_abs_path = buffer_abs_path.unwrap();
2424 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2425 server.clone()
2426 } else {
2427 return Task::ready(Ok(Default::default()));
2428 };
2429
2430 let lsp_range = lsp::Range::new(
2431 range.start.to_point_utf16(buffer).to_lsp_position(),
2432 range.end.to_point_utf16(buffer).to_lsp_position(),
2433 );
2434 cx.foreground().spawn(async move {
2435 if lang_server.capabilities().code_action_provider.is_none() {
2436 return Ok(Default::default());
2437 }
2438
2439 Ok(lang_server
2440 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2441 text_document: lsp::TextDocumentIdentifier::new(
2442 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2443 ),
2444 range: lsp_range,
2445 work_done_progress_params: Default::default(),
2446 partial_result_params: Default::default(),
2447 context: lsp::CodeActionContext {
2448 diagnostics: Default::default(),
2449 only: Some(vec![
2450 lsp::CodeActionKind::QUICKFIX,
2451 lsp::CodeActionKind::REFACTOR,
2452 lsp::CodeActionKind::REFACTOR_EXTRACT,
2453 ]),
2454 },
2455 })
2456 .await?
2457 .unwrap_or_default()
2458 .into_iter()
2459 .filter_map(|entry| {
2460 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2461 Some(CodeAction {
2462 range: range.clone(),
2463 lsp_action,
2464 })
2465 } else {
2466 None
2467 }
2468 })
2469 .collect())
2470 })
2471 } else if let Some(project_id) = self.remote_id() {
2472 let rpc = self.client.clone();
2473 let version = buffer.version();
2474 cx.spawn_weak(|_, mut cx| async move {
2475 let response = rpc
2476 .request(proto::GetCodeActions {
2477 project_id,
2478 buffer_id,
2479 start: Some(language::proto::serialize_anchor(&range.start)),
2480 end: Some(language::proto::serialize_anchor(&range.end)),
2481 version: serialize_version(&version),
2482 })
2483 .await?;
2484
2485 buffer_handle
2486 .update(&mut cx, |buffer, _| {
2487 buffer.wait_for_version(deserialize_version(response.version))
2488 })
2489 .await;
2490
2491 response
2492 .actions
2493 .into_iter()
2494 .map(language::proto::deserialize_code_action)
2495 .collect()
2496 })
2497 } else {
2498 Task::ready(Ok(Default::default()))
2499 }
2500 }
2501
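    /// Applies a code action and returns the resulting [`ProjectTransaction`].
    ///
    /// If the action carries resolvable data, its range is refreshed and the action
    /// is resolved with the language server; otherwise the available actions are
    /// re-requested and matched by title, failing as outdated if the action is no
    /// longer offered. Any workspace edit attached to the resolved action is then
    /// applied.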
2502 pub fn apply_code_action(
2503 &self,
2504 buffer_handle: ModelHandle<Buffer>,
2505 mut action: CodeAction,
2506 push_to_history: bool,
2507 cx: &mut ModelContext<Self>,
2508 ) -> Task<Result<ProjectTransaction>> {
2509 if self.is_local() {
2510 let buffer = buffer_handle.read(cx);
2511 let lang_name = if let Some(lang) = buffer.language() {
2512 lang.name()
2513 } else {
2514 return Task::ready(Ok(Default::default()));
2515 };
2516 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2517 server.clone()
2518 } else {
2519 return Task::ready(Ok(Default::default()));
2520 };
2521 let range = action.range.to_point_utf16(buffer);
2522
2523 cx.spawn(|this, mut cx| async move {
2524 if let Some(lsp_range) = action
2525 .lsp_action
2526 .data
2527 .as_mut()
2528 .and_then(|d| d.get_mut("codeActionParams"))
2529 .and_then(|d| d.get_mut("range"))
2530 {
2531 *lsp_range = serde_json::to_value(&lsp::Range::new(
2532 range.start.to_lsp_position(),
2533 range.end.to_lsp_position(),
2534 ))
2535 .unwrap();
2536 action.lsp_action = lang_server
2537 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2538 .await?;
2539 } else {
2540 let actions = this
2541 .update(&mut cx, |this, cx| {
2542 this.code_actions(&buffer_handle, action.range, cx)
2543 })
2544 .await?;
2545 action.lsp_action = actions
2546 .into_iter()
2547 .find(|a| a.lsp_action.title == action.lsp_action.title)
2548 .ok_or_else(|| anyhow!("code action is outdated"))?
2549 .lsp_action;
2550 }
2551
2552 if let Some(edit) = action.lsp_action.edit {
2553 Self::deserialize_workspace_edit(
2554 this,
2555 edit,
2556 push_to_history,
2557 lang_name,
2558 lang_server,
2559 &mut cx,
2560 )
2561 .await
2562 } else {
2563 Ok(ProjectTransaction::default())
2564 }
2565 })
2566 } else if let Some(project_id) = self.remote_id() {
2567 let client = self.client.clone();
2568 let request = proto::ApplyCodeAction {
2569 project_id,
2570 buffer_id: buffer_handle.read(cx).remote_id(),
2571 action: Some(language::proto::serialize_code_action(&action)),
2572 };
2573 cx.spawn(|this, mut cx| async move {
2574 let response = client
2575 .request(request)
2576 .await?
2577 .transaction
2578 .ok_or_else(|| anyhow!("missing transaction"))?;
2579 this.update(&mut cx, |this, cx| {
2580 this.deserialize_project_transaction(response, push_to_history, cx)
2581 })
2582 .await
2583 })
2584 } else {
2585 Task::ready(Err(anyhow!("project does not have a remote id")))
2586 }
2587 }
2588
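    /// Applies an LSP workspace edit to the project: file create, rename, and
    /// delete operations are performed on disk, per-document edits are applied to
    /// the corresponding buffers, and the resulting transactions are collected into
    /// a [`ProjectTransaction`].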
2589 async fn deserialize_workspace_edit(
2590 this: ModelHandle<Self>,
2591 edit: lsp::WorkspaceEdit,
2592 push_to_history: bool,
2593 language_name: Arc<str>,
2594 language_server: Arc<LanguageServer>,
2595 cx: &mut AsyncAppContext,
2596 ) -> Result<ProjectTransaction> {
2597 let fs = this.read_with(cx, |this, _| this.fs.clone());
2598 let mut operations = Vec::new();
2599 if let Some(document_changes) = edit.document_changes {
2600 match document_changes {
2601 lsp::DocumentChanges::Edits(edits) => {
2602 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2603 }
2604 lsp::DocumentChanges::Operations(ops) => operations = ops,
2605 }
2606 } else if let Some(changes) = edit.changes {
2607 operations.extend(changes.into_iter().map(|(uri, edits)| {
2608 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2609 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2610 uri,
2611 version: None,
2612 },
2613 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2614 })
2615 }));
2616 }
2617
2618 let mut project_transaction = ProjectTransaction::default();
2619 for operation in operations {
2620 match operation {
2621 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2622 let abs_path = op
2623 .uri
2624 .to_file_path()
2625 .map_err(|_| anyhow!("can't convert URI to path"))?;
2626
2627 if let Some(parent_path) = abs_path.parent() {
2628 fs.create_dir(parent_path).await?;
2629 }
2630 if abs_path.ends_with("/") {
2631 fs.create_dir(&abs_path).await?;
2632 } else {
2633 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2634 .await?;
2635 }
2636 }
2637 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2638 let source_abs_path = op
2639 .old_uri
2640 .to_file_path()
2641 .map_err(|_| anyhow!("can't convert URI to path"))?;
2642 let target_abs_path = op
2643 .new_uri
2644 .to_file_path()
2645 .map_err(|_| anyhow!("can't convert URI to path"))?;
2646 fs.rename(
2647 &source_abs_path,
2648 &target_abs_path,
2649 op.options.map(Into::into).unwrap_or_default(),
2650 )
2651 .await?;
2652 }
2653 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2654 let abs_path = op
2655 .uri
2656 .to_file_path()
2657 .map_err(|_| anyhow!("can't convert URI to path"))?;
2658 let options = op.options.map(Into::into).unwrap_or_default();
2659 if abs_path.ends_with("/") {
2660 fs.remove_dir(&abs_path, options).await?;
2661 } else {
2662 fs.remove_file(&abs_path, options).await?;
2663 }
2664 }
2665 lsp::DocumentChangeOperation::Edit(op) => {
2666 let buffer_to_edit = this
2667 .update(cx, |this, cx| {
2668 this.open_local_buffer_via_lsp(
2669 op.text_document.uri,
2670 language_name.clone(),
2671 language_server.clone(),
2672 cx,
2673 )
2674 })
2675 .await?;
2676
2677 let edits = this
2678 .update(cx, |this, cx| {
2679 let edits = op.edits.into_iter().map(|edit| match edit {
2680 lsp::OneOf::Left(edit) => edit,
2681 lsp::OneOf::Right(edit) => edit.text_edit,
2682 });
2683 this.edits_from_lsp(
2684 &buffer_to_edit,
2685 edits,
2686 op.text_document.version,
2687 cx,
2688 )
2689 })
2690 .await?;
2691
2692 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2693 buffer.finalize_last_transaction();
2694 buffer.start_transaction();
2695 for (range, text) in edits {
2696 buffer.edit([range], text, cx);
2697 }
2698 let transaction = if buffer.end_transaction(cx).is_some() {
2699 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2700 if !push_to_history {
2701 buffer.forget_transaction(transaction.id);
2702 }
2703 Some(transaction)
2704 } else {
2705 None
2706 };
2707
2708 transaction
2709 });
2710 if let Some(transaction) = transaction {
2711 project_transaction.0.insert(buffer_to_edit, transaction);
2712 }
2713 }
2714 }
2715 }
2716
2717 Ok(project_transaction)
2718 }
2719
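    /// Asks the language server whether the symbol at `position` can be renamed,
    /// returning the range of the symbol if so.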
2720 pub fn prepare_rename<T: ToPointUtf16>(
2721 &self,
2722 buffer: ModelHandle<Buffer>,
2723 position: T,
2724 cx: &mut ModelContext<Self>,
2725 ) -> Task<Result<Option<Range<Anchor>>>> {
2726 let position = position.to_point_utf16(buffer.read(cx));
2727 self.request_lsp(buffer, PrepareRename { position }, cx)
2728 }
2729
2730 pub fn perform_rename<T: ToPointUtf16>(
2731 &self,
2732 buffer: ModelHandle<Buffer>,
2733 position: T,
2734 new_name: String,
2735 push_to_history: bool,
2736 cx: &mut ModelContext<Self>,
2737 ) -> Task<Result<ProjectTransaction>> {
2738 let position = position.to_point_utf16(buffer.read(cx));
2739 self.request_lsp(
2740 buffer,
2741 PerformRename {
2742 position,
2743 new_name,
2744 push_to_history,
2745 },
2746 cx,
2747 )
2748 }
2749
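    /// Searches the project for matches of `query`.
    ///
    /// Local projects scan visible worktrees on background threads: candidate paths
    /// are detected by reading files from disk, matching files are opened as
    /// buffers (reusing buffers that are already open), and match ranges are then
    /// computed against buffer snapshots by a pool of workers. Remote projects
    /// forward the query to the host and deserialize the returned locations.
    ///
    /// Illustrative call (names are hypothetical):
    /// ```ignore
    /// let matches = project.update(cx, |project, cx| project.search(query, cx));
    /// ```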
2750 pub fn search(
2751 &self,
2752 query: SearchQuery,
2753 cx: &mut ModelContext<Self>,
2754 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2755 if self.is_local() {
2756 let snapshots = self
2757 .visible_worktrees(cx)
2758 .filter_map(|tree| {
2759 let tree = tree.read(cx).as_local()?;
2760 Some(tree.snapshot())
2761 })
2762 .collect::<Vec<_>>();
2763
2764 let background = cx.background().clone();
2765 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2766 if path_count == 0 {
2767 return Task::ready(Ok(Default::default()));
2768 }
2769 let workers = background.num_cpus().min(path_count);
2770 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2771 cx.background()
2772 .spawn({
2773 let fs = self.fs.clone();
2774 let background = cx.background().clone();
2775 let query = query.clone();
2776 async move {
2777 let fs = &fs;
2778 let query = &query;
2779 let matching_paths_tx = &matching_paths_tx;
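                        // Partition the candidate paths evenly across the workers
                        // (ceiling division, so every path is assigned to a worker).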
2780 let paths_per_worker = (path_count + workers - 1) / workers;
2781 let snapshots = &snapshots;
2782 background
2783 .scoped(|scope| {
2784 for worker_ix in 0..workers {
2785 let worker_start_ix = worker_ix * paths_per_worker;
2786 let worker_end_ix = worker_start_ix + paths_per_worker;
2787 scope.spawn(async move {
2788 let mut snapshot_start_ix = 0;
2789 let mut abs_path = PathBuf::new();
2790 for snapshot in snapshots {
2791 let snapshot_end_ix =
2792 snapshot_start_ix + snapshot.visible_file_count();
2793 if worker_end_ix <= snapshot_start_ix {
2794 break;
2795 } else if worker_start_ix > snapshot_end_ix {
2796 snapshot_start_ix = snapshot_end_ix;
2797 continue;
2798 } else {
2799 let start_in_snapshot = worker_start_ix
2800 .saturating_sub(snapshot_start_ix);
2801 let end_in_snapshot =
2802 cmp::min(worker_end_ix, snapshot_end_ix)
2803 - snapshot_start_ix;
2804
2805 for entry in snapshot
2806 .files(false, start_in_snapshot)
2807 .take(end_in_snapshot - start_in_snapshot)
2808 {
2809 if matching_paths_tx.is_closed() {
2810 break;
2811 }
2812
2813 abs_path.clear();
2814 abs_path.push(&snapshot.abs_path());
2815 abs_path.push(&entry.path);
2816 let matches = if let Some(file) =
2817 fs.open_sync(&abs_path).await.log_err()
2818 {
2819 query.detect(file).unwrap_or(false)
2820 } else {
2821 false
2822 };
2823
2824 if matches {
2825 let project_path =
2826 (snapshot.id(), entry.path.clone());
2827 if matching_paths_tx
2828 .send(project_path)
2829 .await
2830 .is_err()
2831 {
2832 break;
2833 }
2834 }
2835 }
2836
2837 snapshot_start_ix = snapshot_end_ix;
2838 }
2839 }
2840 });
2841 }
2842 })
2843 .await;
2844 }
2845 })
2846 .detach();
2847
2848 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2849 let open_buffers = self
2850 .opened_buffers
2851 .values()
2852 .filter_map(|b| b.upgrade(cx))
2853 .collect::<HashSet<_>>();
2854 cx.spawn(|this, cx| async move {
2855 for buffer in &open_buffers {
2856 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2857 buffers_tx.send((buffer.clone(), snapshot)).await?;
2858 }
2859
2860 let open_buffers = Rc::new(RefCell::new(open_buffers));
2861 while let Some(project_path) = matching_paths_rx.next().await {
2862 if buffers_tx.is_closed() {
2863 break;
2864 }
2865
2866 let this = this.clone();
2867 let open_buffers = open_buffers.clone();
2868 let buffers_tx = buffers_tx.clone();
2869 cx.spawn(|mut cx| async move {
2870 if let Some(buffer) = this
2871 .update(&mut cx, |this, cx| {
2872 this.open_buffer_for_path(project_path, cx)
2873 })
2874 .await
2875 .log_err()
2876 {
2877 if open_buffers.borrow_mut().insert(buffer.clone()) {
2878 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2879 buffers_tx.send((buffer, snapshot)).await?;
2880 }
2881 }
2882
2883 Ok::<_, anyhow::Error>(())
2884 })
2885 .detach();
2886 }
2887
2888 Ok::<_, anyhow::Error>(())
2889 })
2890 .detach_and_log_err(cx);
2891
2892 let background = cx.background().clone();
2893 cx.background().spawn(async move {
2894 let query = &query;
2895 let mut matched_buffers = Vec::new();
2896 for _ in 0..workers {
2897 matched_buffers.push(HashMap::default());
2898 }
2899 background
2900 .scoped(|scope| {
2901 for worker_matched_buffers in matched_buffers.iter_mut() {
2902 let mut buffers_rx = buffers_rx.clone();
2903 scope.spawn(async move {
2904 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2905 let buffer_matches = query
2906 .search(snapshot.as_rope())
2907 .await
2908 .iter()
2909 .map(|range| {
2910 snapshot.anchor_before(range.start)
2911 ..snapshot.anchor_after(range.end)
2912 })
2913 .collect::<Vec<_>>();
2914 if !buffer_matches.is_empty() {
2915 worker_matched_buffers
2916 .insert(buffer.clone(), buffer_matches);
2917 }
2918 }
2919 });
2920 }
2921 })
2922 .await;
2923 Ok(matched_buffers.into_iter().flatten().collect())
2924 })
2925 } else if let Some(project_id) = self.remote_id() {
2926 let request = self.client.request(query.to_proto(project_id));
2927 cx.spawn(|this, mut cx| async move {
2928 let response = request.await?;
2929 let mut result = HashMap::default();
2930 for location in response.locations {
2931 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2932 let target_buffer = this
2933 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2934 .await?;
2935 let start = location
2936 .start
2937 .and_then(deserialize_anchor)
2938 .ok_or_else(|| anyhow!("missing target start"))?;
2939 let end = location
2940 .end
2941 .and_then(deserialize_anchor)
2942 .ok_or_else(|| anyhow!("missing target end"))?;
2943 result
2944 .entry(target_buffer)
2945 .or_insert_with(Vec::new)
2946 .push(start..end);
2947 }
2948 Ok(result)
2949 })
2950 } else {
2951 Task::ready(Ok(Default::default()))
2952 }
2953 }
2954
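    /// Dispatches an [`LspCommand`], either directly to the buffer's language
    /// server (for local buffers) or over RPC to the host (for remote buffers).
    /// Resolves to a default response when no server is available or the server
    /// lacks the required capability.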
2955 fn request_lsp<R: LspCommand>(
2956 &self,
2957 buffer_handle: ModelHandle<Buffer>,
2958 request: R,
2959 cx: &mut ModelContext<Self>,
2960 ) -> Task<Result<R::Response>>
2961 where
2962 <R::LspRequest as lsp::request::Request>::Result: Send,
2963 {
2964 let buffer = buffer_handle.read(cx);
2965 if self.is_local() {
2966 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2967 if let Some((file, language_server)) =
2968 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2969 {
2970 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2971 return cx.spawn(|this, cx| async move {
2972 if !request.check_capabilities(&language_server.capabilities()) {
2973 return Ok(Default::default());
2974 }
2975
2976 let response = language_server
2977 .request::<R::LspRequest>(lsp_params)
2978 .await
2979 .context("lsp request failed")?;
2980 request
2981 .response_from_lsp(response, this, buffer_handle, cx)
2982 .await
2983 });
2984 }
2985 } else if let Some(project_id) = self.remote_id() {
2986 let rpc = self.client.clone();
2987 let message = request.to_proto(project_id, buffer);
2988 return cx.spawn(|this, cx| async move {
2989 let response = rpc.request(message).await?;
2990 request
2991 .response_from_proto(response, this, buffer_handle, cx)
2992 .await
2993 });
2994 }
2995 Task::ready(Ok(Default::default()))
2996 }
2997
2998 pub fn find_or_create_local_worktree(
2999 &mut self,
3000 abs_path: impl AsRef<Path>,
3001 visible: bool,
3002 cx: &mut ModelContext<Self>,
3003 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3004 let abs_path = abs_path.as_ref();
3005 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3006 Task::ready(Ok((tree.clone(), relative_path.into())))
3007 } else {
3008 let worktree = self.create_local_worktree(abs_path, visible, cx);
3009 cx.foreground()
3010 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3011 }
3012 }
3013
3014 pub fn find_local_worktree(
3015 &self,
3016 abs_path: &Path,
3017 cx: &AppContext,
3018 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3019 for tree in self.worktrees(cx) {
3020 if let Some(relative_path) = tree
3021 .read(cx)
3022 .as_local()
3023 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3024 {
3025 return Some((tree.clone(), relative_path.into()));
3026 }
3027 }
3028 None
3029 }
3030
3031 pub fn is_shared(&self) -> bool {
3032 match &self.client_state {
3033 ProjectClientState::Local { is_shared, .. } => *is_shared,
3034 ProjectClientState::Remote { .. } => false,
3035 }
3036 }
3037
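    /// Creates a local worktree rooted at `abs_path`, reusing an in-flight load for
    /// the same path if one exists. Once loaded, the worktree is added to the
    /// project and, if the project has a remote id, shared or registered with the
    /// server depending on whether the project is currently shared.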
3038 fn create_local_worktree(
3039 &mut self,
3040 abs_path: impl AsRef<Path>,
3041 visible: bool,
3042 cx: &mut ModelContext<Self>,
3043 ) -> Task<Result<ModelHandle<Worktree>>> {
3044 let fs = self.fs.clone();
3045 let client = self.client.clone();
3046 let next_entry_id = self.next_entry_id.clone();
3047 let path: Arc<Path> = abs_path.as_ref().into();
3048 let task = self
3049 .loading_local_worktrees
3050 .entry(path.clone())
3051 .or_insert_with(|| {
3052 cx.spawn(|project, mut cx| {
3053 async move {
3054 let worktree = Worktree::local(
3055 client.clone(),
3056 path.clone(),
3057 visible,
3058 fs,
3059 next_entry_id,
3060 &mut cx,
3061 )
3062 .await;
3063 project.update(&mut cx, |project, _| {
3064 project.loading_local_worktrees.remove(&path);
3065 });
3066 let worktree = worktree?;
3067
3068 let (remote_project_id, is_shared) =
3069 project.update(&mut cx, |project, cx| {
3070 project.add_worktree(&worktree, cx);
3071 (project.remote_id(), project.is_shared())
3072 });
3073
3074 if let Some(project_id) = remote_project_id {
3075 if is_shared {
3076 worktree
3077 .update(&mut cx, |worktree, cx| {
3078 worktree.as_local_mut().unwrap().share(project_id, cx)
3079 })
3080 .await?;
3081 } else {
3082 worktree
3083 .update(&mut cx, |worktree, cx| {
3084 worktree.as_local_mut().unwrap().register(project_id, cx)
3085 })
3086 .await?;
3087 }
3088 }
3089
3090 Ok(worktree)
3091 }
3092 .map_err(Arc::new)
3093 })
3094 .shared()
3095 })
3096 .clone();
3097 cx.foreground().spawn(async move {
3098 match task.await {
3099 Ok(worktree) => Ok(worktree),
3100 Err(err) => Err(anyhow!("{}", err)),
3101 }
3102 })
3103 }
3104
3105 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3106 self.worktrees.retain(|worktree| {
3107 worktree
3108 .upgrade(cx)
3109 .map_or(false, |w| w.read(cx).id() != id)
3110 });
3111 cx.notify();
3112 }
3113
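    /// Adds a worktree to the project, holding it strongly when the project is
    /// shared or the worktree is visible or remote, and weakly otherwise so that
    /// invisible local worktrees can be dropped once they are no longer referenced
    /// elsewhere.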
3114 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3115 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3116 if worktree.read(cx).is_local() {
3117 cx.subscribe(&worktree, |this, worktree, _, cx| {
3118 this.update_local_worktree_buffers(worktree, cx);
3119 })
3120 .detach();
3121 }
3122
3123 let push_strong_handle = {
3124 let worktree = worktree.read(cx);
3125 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3126 };
3127 if push_strong_handle {
3128 self.worktrees
3129 .push(WorktreeHandle::Strong(worktree.clone()));
3130 } else {
3131 cx.observe_release(&worktree, |this, _, cx| {
3132 this.worktrees
3133 .retain(|worktree| worktree.upgrade(cx).is_some());
3134 cx.notify();
3135 })
3136 .detach();
3137 self.worktrees
3138 .push(WorktreeHandle::Weak(worktree.downgrade()));
3139 }
3140 cx.notify();
3141 }
3142
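    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's `File` is updated to the matching entry (by entry id, then by
    /// path), or replaced with an entry-less `File` if the file no longer exists in
    /// the snapshot, and collaborators are notified of the change.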
3143 fn update_local_worktree_buffers(
3144 &mut self,
3145 worktree_handle: ModelHandle<Worktree>,
3146 cx: &mut ModelContext<Self>,
3147 ) {
3148 let snapshot = worktree_handle.read(cx).snapshot();
3149 let mut buffers_to_delete = Vec::new();
3150 for (buffer_id, buffer) in &self.opened_buffers {
3151 if let Some(buffer) = buffer.upgrade(cx) {
3152 buffer.update(cx, |buffer, cx| {
3153 if let Some(old_file) = File::from_dyn(buffer.file()) {
3154 if old_file.worktree != worktree_handle {
3155 return;
3156 }
3157
3158 let new_file = if let Some(entry) = old_file
3159 .entry_id
3160 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3161 {
3162 File {
3163 is_local: true,
3164 entry_id: Some(entry.id),
3165 mtime: entry.mtime,
3166 path: entry.path.clone(),
3167 worktree: worktree_handle.clone(),
3168 }
3169 } else if let Some(entry) =
3170 snapshot.entry_for_path(old_file.path().as_ref())
3171 {
3172 File {
3173 is_local: true,
3174 entry_id: Some(entry.id),
3175 mtime: entry.mtime,
3176 path: entry.path.clone(),
3177 worktree: worktree_handle.clone(),
3178 }
3179 } else {
3180 File {
3181 is_local: true,
3182 entry_id: None,
3183 path: old_file.path().clone(),
3184 mtime: old_file.mtime(),
3185 worktree: worktree_handle.clone(),
3186 }
3187 };
3188
3189 if let Some(project_id) = self.remote_id() {
3190 self.client
3191 .send(proto::UpdateBufferFile {
3192 project_id,
3193 buffer_id: *buffer_id as u64,
3194 file: Some(new_file.to_proto()),
3195 })
3196 .log_err();
3197 }
3198 buffer.file_updated(Box::new(new_file), cx).detach();
3199 }
3200 });
3201 } else {
3202 buffers_to_delete.push(*buffer_id);
3203 }
3204 }
3205
3206 for buffer_id in buffers_to_delete {
3207 self.opened_buffers.remove(&buffer_id);
3208 }
3209 }
3210
3211 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3212 let new_active_entry = entry.and_then(|project_path| {
3213 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3214 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3215 Some(entry.id)
3216 });
3217 if new_active_entry != self.active_entry {
3218 self.active_entry = new_active_entry;
3219 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3220 }
3221 }
3222
3223 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3224 self.language_servers_with_diagnostics_running > 0
3225 }
3226
3227 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3228 let mut summary = DiagnosticSummary::default();
3229 for (_, path_summary) in self.diagnostic_summaries(cx) {
3230 summary.error_count += path_summary.error_count;
3231 summary.warning_count += path_summary.warning_count;
3232 summary.info_count += path_summary.info_count;
3233 summary.hint_count += path_summary.hint_count;
3234 }
3235 summary
3236 }
3237
3238 pub fn diagnostic_summaries<'a>(
3239 &'a self,
3240 cx: &'a AppContext,
3241 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3242 self.worktrees(cx).flat_map(move |worktree| {
3243 let worktree = worktree.read(cx);
3244 let worktree_id = worktree.id();
3245 worktree
3246 .diagnostic_summaries()
3247 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3248 })
3249 }
3250
3251 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3252 self.language_servers_with_diagnostics_running += 1;
3253 if self.language_servers_with_diagnostics_running == 1 {
3254 cx.emit(Event::DiskBasedDiagnosticsStarted);
3255 }
3256 }
3257
3258 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3259 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3260 self.language_servers_with_diagnostics_running -= 1;
3261 if self.language_servers_with_diagnostics_running == 0 {
3262 cx.emit(Event::DiskBasedDiagnosticsFinished);
3263 }
3264 }
3265
3266 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3267 self.active_entry
3268 }
3269
3270 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3271 self.worktree_for_id(path.worktree_id, cx)?
3272 .read(cx)
3273 .entry_for_path(&path.path)
3274 .map(|entry| entry.id)
3275 }
3276
3277 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3278 let worktree = self.worktree_for_entry(entry_id, cx)?;
3279 let worktree = worktree.read(cx);
3280 let worktree_id = worktree.id();
3281 let path = worktree.entry_for_id(entry_id)?.path.clone();
3282 Some(ProjectPath { worktree_id, path })
3283 }
3284
3285 // RPC message handlers
3286
3287 async fn handle_unshare_project(
3288 this: ModelHandle<Self>,
3289 _: TypedEnvelope<proto::UnshareProject>,
3290 _: Arc<Client>,
3291 mut cx: AsyncAppContext,
3292 ) -> Result<()> {
3293 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3294 Ok(())
3295 }
3296
3297 async fn handle_add_collaborator(
3298 this: ModelHandle<Self>,
3299 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3300 _: Arc<Client>,
3301 mut cx: AsyncAppContext,
3302 ) -> Result<()> {
3303 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3304 let collaborator = envelope
3305 .payload
3306 .collaborator
3307 .take()
3308 .ok_or_else(|| anyhow!("empty collaborator"))?;
3309
3310 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3311 this.update(&mut cx, |this, cx| {
3312 this.collaborators
3313 .insert(collaborator.peer_id, collaborator);
3314 cx.notify();
3315 });
3316
3317 Ok(())
3318 }
3319
3320 async fn handle_remove_collaborator(
3321 this: ModelHandle<Self>,
3322 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3323 _: Arc<Client>,
3324 mut cx: AsyncAppContext,
3325 ) -> Result<()> {
3326 this.update(&mut cx, |this, cx| {
3327 let peer_id = PeerId(envelope.payload.peer_id);
3328 let replica_id = this
3329 .collaborators
3330 .remove(&peer_id)
3331 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3332 .replica_id;
3333 for (_, buffer) in &this.opened_buffers {
3334 if let Some(buffer) = buffer.upgrade(cx) {
3335 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3336 }
3337 }
3338 cx.notify();
3339 Ok(())
3340 })
3341 }
3342
3343 async fn handle_register_worktree(
3344 this: ModelHandle<Self>,
3345 envelope: TypedEnvelope<proto::RegisterWorktree>,
3346 client: Arc<Client>,
3347 mut cx: AsyncAppContext,
3348 ) -> Result<()> {
3349 this.update(&mut cx, |this, cx| {
3350 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3351 let replica_id = this.replica_id();
3352 let worktree = proto::Worktree {
3353 id: envelope.payload.worktree_id,
3354 root_name: envelope.payload.root_name,
3355 entries: Default::default(),
3356 diagnostic_summaries: Default::default(),
3357 visible: envelope.payload.visible,
3358 };
3359 let (worktree, load_task) =
3360 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3361 this.add_worktree(&worktree, cx);
3362 load_task.detach();
3363 Ok(())
3364 })
3365 }
3366
3367 async fn handle_unregister_worktree(
3368 this: ModelHandle<Self>,
3369 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3370 _: Arc<Client>,
3371 mut cx: AsyncAppContext,
3372 ) -> Result<()> {
3373 this.update(&mut cx, |this, cx| {
3374 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3375 this.remove_worktree(worktree_id, cx);
3376 Ok(())
3377 })
3378 }
3379
3380 async fn handle_update_worktree(
3381 this: ModelHandle<Self>,
3382 envelope: TypedEnvelope<proto::UpdateWorktree>,
3383 _: Arc<Client>,
3384 mut cx: AsyncAppContext,
3385 ) -> Result<()> {
3386 this.update(&mut cx, |this, cx| {
3387 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3388 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3389 worktree.update(cx, |worktree, _| {
3390 let worktree = worktree.as_remote_mut().unwrap();
3391 worktree.update_from_remote(envelope)
3392 })?;
3393 }
3394 Ok(())
3395 })
3396 }
3397
3398 async fn handle_update_diagnostic_summary(
3399 this: ModelHandle<Self>,
3400 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3401 _: Arc<Client>,
3402 mut cx: AsyncAppContext,
3403 ) -> Result<()> {
3404 this.update(&mut cx, |this, cx| {
3405 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3406 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3407 if let Some(summary) = envelope.payload.summary {
3408 let project_path = ProjectPath {
3409 worktree_id,
3410 path: Path::new(&summary.path).into(),
3411 };
3412 worktree.update(cx, |worktree, _| {
3413 worktree
3414 .as_remote_mut()
3415 .unwrap()
3416 .update_diagnostic_summary(project_path.path.clone(), &summary);
3417 });
3418 cx.emit(Event::DiagnosticsUpdated(project_path));
3419 }
3420 }
3421 Ok(())
3422 })
3423 }
3424
3425 async fn handle_start_language_server(
3426 this: ModelHandle<Self>,
3427 envelope: TypedEnvelope<proto::StartLanguageServer>,
3428 _: Arc<Client>,
3429 mut cx: AsyncAppContext,
3430 ) -> Result<()> {
3431 let server = envelope
3432 .payload
3433 .server
3434 .ok_or_else(|| anyhow!("invalid server"))?;
3435 this.update(&mut cx, |this, cx| {
3436 this.language_server_statuses.insert(
3437 server.id as usize,
3438 LanguageServerStatus {
3439 name: server.name,
3440 pending_work: Default::default(),
3441 pending_diagnostic_updates: 0,
3442 },
3443 );
3444 cx.notify();
3445 });
3446 Ok(())
3447 }
3448
3449 async fn handle_update_language_server(
3450 this: ModelHandle<Self>,
3451 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3452 _: Arc<Client>,
3453 mut cx: AsyncAppContext,
3454 ) -> Result<()> {
3455 let language_server_id = envelope.payload.language_server_id as usize;
3456 match envelope
3457 .payload
3458 .variant
3459 .ok_or_else(|| anyhow!("invalid variant"))?
3460 {
3461 proto::update_language_server::Variant::WorkStart(payload) => {
3462 this.update(&mut cx, |this, cx| {
3463 this.on_lsp_work_start(language_server_id, payload.token, cx);
3464 })
3465 }
3466 proto::update_language_server::Variant::WorkProgress(payload) => {
3467 this.update(&mut cx, |this, cx| {
3468 this.on_lsp_work_progress(
3469 language_server_id,
3470 payload.token,
3471 LanguageServerProgress {
3472 message: payload.message,
3473 percentage: payload.percentage.map(|p| p as usize),
3474 last_update_at: Instant::now(),
3475 },
3476 cx,
3477 );
3478 })
3479 }
3480 proto::update_language_server::Variant::WorkEnd(payload) => {
3481 this.update(&mut cx, |this, cx| {
3482 this.on_lsp_work_end(language_server_id, payload.token, cx);
3483 })
3484 }
3485 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3486 this.update(&mut cx, |this, cx| {
3487 this.disk_based_diagnostics_started(cx);
3488 })
3489 }
3490 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3491 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3492 }
3493 }
3494
3495 Ok(())
3496 }
3497
3498 async fn handle_update_buffer(
3499 this: ModelHandle<Self>,
3500 envelope: TypedEnvelope<proto::UpdateBuffer>,
3501 _: Arc<Client>,
3502 mut cx: AsyncAppContext,
3503 ) -> Result<()> {
3504 this.update(&mut cx, |this, cx| {
3505 let payload = envelope.payload.clone();
3506 let buffer_id = payload.buffer_id;
3507 let ops = payload
3508 .operations
3509 .into_iter()
3510 .map(language::proto::deserialize_operation)
3511 .collect::<Result<Vec<_>, _>>()?;
3512 match this.opened_buffers.entry(buffer_id) {
3513 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3514 OpenBuffer::Strong(buffer) => {
3515 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3516 }
3517 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3518 OpenBuffer::Weak(_) => {}
3519 },
3520 hash_map::Entry::Vacant(e) => {
3521 e.insert(OpenBuffer::Loading(ops));
3522 }
3523 }
3524 Ok(())
3525 })
3526 }
3527
3528 async fn handle_update_buffer_file(
3529 this: ModelHandle<Self>,
3530 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3531 _: Arc<Client>,
3532 mut cx: AsyncAppContext,
3533 ) -> Result<()> {
3534 this.update(&mut cx, |this, cx| {
3535 let payload = envelope.payload.clone();
3536 let buffer_id = payload.buffer_id;
3537 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3538 let worktree = this
3539 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3540 .ok_or_else(|| anyhow!("no such worktree"))?;
3541 let file = File::from_proto(file, worktree.clone(), cx)?;
3542 let buffer = this
3543 .opened_buffers
3544 .get_mut(&buffer_id)
3545 .and_then(|b| b.upgrade(cx))
3546 .ok_or_else(|| anyhow!("no such buffer"))?;
3547 buffer.update(cx, |buffer, cx| {
3548 buffer.file_updated(Box::new(file), cx).detach();
3549 });
3550 Ok(())
3551 })
3552 }
3553
3554 async fn handle_save_buffer(
3555 this: ModelHandle<Self>,
3556 envelope: TypedEnvelope<proto::SaveBuffer>,
3557 _: Arc<Client>,
3558 mut cx: AsyncAppContext,
3559 ) -> Result<proto::BufferSaved> {
3560 let buffer_id = envelope.payload.buffer_id;
3561 let requested_version = deserialize_version(envelope.payload.version);
3562
3563 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3564 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3565 let buffer = this
3566 .opened_buffers
3567 .get(&buffer_id)
3568 .map(|buffer| buffer.upgrade(cx).unwrap())
3569 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3570 Ok::<_, anyhow::Error>((project_id, buffer))
3571 })?;
3572 buffer
3573 .update(&mut cx, |buffer, _| {
3574 buffer.wait_for_version(requested_version)
3575 })
3576 .await;
3577
3578 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3579 Ok(proto::BufferSaved {
3580 project_id,
3581 buffer_id,
3582 version: serialize_version(&saved_version),
3583 mtime: Some(mtime.into()),
3584 })
3585 }
3586
3587 async fn handle_format_buffers(
3588 this: ModelHandle<Self>,
3589 envelope: TypedEnvelope<proto::FormatBuffers>,
3590 _: Arc<Client>,
3591 mut cx: AsyncAppContext,
3592 ) -> Result<proto::FormatBuffersResponse> {
3593 let sender_id = envelope.original_sender_id()?;
3594 let format = this.update(&mut cx, |this, cx| {
3595 let mut buffers = HashSet::default();
3596 for buffer_id in &envelope.payload.buffer_ids {
3597 buffers.insert(
3598 this.opened_buffers
3599 .get(buffer_id)
3600 .map(|buffer| buffer.upgrade(cx).unwrap())
3601 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3602 );
3603 }
3604 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3605 })?;
3606
3607 let project_transaction = format.await?;
3608 let project_transaction = this.update(&mut cx, |this, cx| {
3609 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3610 });
3611 Ok(proto::FormatBuffersResponse {
3612 transaction: Some(project_transaction),
3613 })
3614 }
3615
3616 async fn handle_get_completions(
3617 this: ModelHandle<Self>,
3618 envelope: TypedEnvelope<proto::GetCompletions>,
3619 _: Arc<Client>,
3620 mut cx: AsyncAppContext,
3621 ) -> Result<proto::GetCompletionsResponse> {
3622 let position = envelope
3623 .payload
3624 .position
3625 .and_then(language::proto::deserialize_anchor)
3626 .ok_or_else(|| anyhow!("invalid position"))?;
3627 let version = deserialize_version(envelope.payload.version);
3628 let buffer = this.read_with(&cx, |this, cx| {
3629 this.opened_buffers
3630 .get(&envelope.payload.buffer_id)
3631 .map(|buffer| buffer.upgrade(cx).unwrap())
3632 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3633 })?;
3634 buffer
3635 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3636 .await;
3637 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3638 let completions = this
3639 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3640 .await?;
3641
3642 Ok(proto::GetCompletionsResponse {
3643 completions: completions
3644 .iter()
3645 .map(language::proto::serialize_completion)
3646 .collect(),
3647 version: serialize_version(&version),
3648 })
3649 }
3650
3651 async fn handle_apply_additional_edits_for_completion(
3652 this: ModelHandle<Self>,
3653 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3654 _: Arc<Client>,
3655 mut cx: AsyncAppContext,
3656 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3657 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3658 let buffer = this
3659 .opened_buffers
3660 .get(&envelope.payload.buffer_id)
3661 .map(|buffer| buffer.upgrade(cx).unwrap())
3662 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3663 let language = buffer.read(cx).language();
3664 let completion = language::proto::deserialize_completion(
3665 envelope
3666 .payload
3667 .completion
3668 .ok_or_else(|| anyhow!("invalid completion"))?,
3669 language,
3670 )?;
3671 Ok::<_, anyhow::Error>(
3672 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3673 )
3674 })?;
3675
3676 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3677 transaction: apply_additional_edits
3678 .await?
3679 .as_ref()
3680 .map(language::proto::serialize_transaction),
3681 })
3682 }
3683
3684 async fn handle_get_code_actions(
3685 this: ModelHandle<Self>,
3686 envelope: TypedEnvelope<proto::GetCodeActions>,
3687 _: Arc<Client>,
3688 mut cx: AsyncAppContext,
3689 ) -> Result<proto::GetCodeActionsResponse> {
3690 let start = envelope
3691 .payload
3692 .start
3693 .and_then(language::proto::deserialize_anchor)
3694 .ok_or_else(|| anyhow!("invalid start"))?;
3695 let end = envelope
3696 .payload
3697 .end
3698 .and_then(language::proto::deserialize_anchor)
3699 .ok_or_else(|| anyhow!("invalid end"))?;
3700 let buffer = this.update(&mut cx, |this, cx| {
3701 this.opened_buffers
3702 .get(&envelope.payload.buffer_id)
3703 .map(|buffer| buffer.upgrade(cx).unwrap())
3704 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3705 })?;
3706 buffer
3707 .update(&mut cx, |buffer, _| {
3708 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3709 })
3710 .await;
3711
3712 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3713 let code_actions = this.update(&mut cx, |this, cx| {
3714 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3715 })?;
3716
3717 Ok(proto::GetCodeActionsResponse {
3718 actions: code_actions
3719 .await?
3720 .iter()
3721 .map(language::proto::serialize_code_action)
3722 .collect(),
3723 version: serialize_version(&version),
3724 })
3725 }
3726
3727 async fn handle_apply_code_action(
3728 this: ModelHandle<Self>,
3729 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3730 _: Arc<Client>,
3731 mut cx: AsyncAppContext,
3732 ) -> Result<proto::ApplyCodeActionResponse> {
3733 let sender_id = envelope.original_sender_id()?;
3734 let action = language::proto::deserialize_code_action(
3735 envelope
3736 .payload
3737 .action
3738 .ok_or_else(|| anyhow!("invalid action"))?,
3739 )?;
3740 let apply_code_action = this.update(&mut cx, |this, cx| {
3741 let buffer = this
3742 .opened_buffers
3743 .get(&envelope.payload.buffer_id)
3744 .map(|buffer| buffer.upgrade(cx).unwrap())
3745 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3746 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3747 })?;
3748
3749 let project_transaction = apply_code_action.await?;
3750 let project_transaction = this.update(&mut cx, |this, cx| {
3751 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3752 });
3753 Ok(proto::ApplyCodeActionResponse {
3754 transaction: Some(project_transaction),
3755 })
3756 }
3757
3758 async fn handle_lsp_command<T: LspCommand>(
3759 this: ModelHandle<Self>,
3760 envelope: TypedEnvelope<T::ProtoRequest>,
3761 _: Arc<Client>,
3762 mut cx: AsyncAppContext,
3763 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3764 where
3765 <T::LspRequest as lsp::request::Request>::Result: Send,
3766 {
3767 let sender_id = envelope.original_sender_id()?;
3768 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3769 let buffer_handle = this.read_with(&cx, |this, _| {
3770 this.opened_buffers
3771 .get(&buffer_id)
3772 .and_then(|buffer| buffer.upgrade(&cx))
3773 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3774 })?;
3775 let request = T::from_proto(
3776 envelope.payload,
3777 this.clone(),
3778 buffer_handle.clone(),
3779 cx.clone(),
3780 )
3781 .await?;
3782 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3783 let response = this
3784 .update(&mut cx, |this, cx| {
3785 this.request_lsp(buffer_handle, request, cx)
3786 })
3787 .await?;
3788 this.update(&mut cx, |this, cx| {
3789 Ok(T::response_to_proto(
3790 response,
3791 this,
3792 sender_id,
3793 &buffer_version,
3794 cx,
3795 ))
3796 })
3797 }
3798
3799 async fn handle_get_project_symbols(
3800 this: ModelHandle<Self>,
3801 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3802 _: Arc<Client>,
3803 mut cx: AsyncAppContext,
3804 ) -> Result<proto::GetProjectSymbolsResponse> {
3805 let symbols = this
3806 .update(&mut cx, |this, cx| {
3807 this.symbols(&envelope.payload.query, cx)
3808 })
3809 .await?;
3810
3811 Ok(proto::GetProjectSymbolsResponse {
3812 symbols: symbols.iter().map(serialize_symbol).collect(),
3813 })
3814 }
3815
3816 async fn handle_search_project(
3817 this: ModelHandle<Self>,
3818 envelope: TypedEnvelope<proto::SearchProject>,
3819 _: Arc<Client>,
3820 mut cx: AsyncAppContext,
3821 ) -> Result<proto::SearchProjectResponse> {
3822 let peer_id = envelope.original_sender_id()?;
3823 let query = SearchQuery::from_proto(envelope.payload)?;
3824 let result = this
3825 .update(&mut cx, |this, cx| this.search(query, cx))
3826 .await?;
3827
3828 this.update(&mut cx, |this, cx| {
3829 let mut locations = Vec::new();
3830 for (buffer, ranges) in result {
3831 for range in ranges {
3832 let start = serialize_anchor(&range.start);
3833 let end = serialize_anchor(&range.end);
3834 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3835 locations.push(proto::Location {
3836 buffer: Some(buffer),
3837 start: Some(start),
3838 end: Some(end),
3839 });
3840 }
3841 }
3842 Ok(proto::SearchProjectResponse { locations })
3843 })
3844 }
3845
3846 async fn handle_open_buffer_for_symbol(
3847 this: ModelHandle<Self>,
3848 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3849 _: Arc<Client>,
3850 mut cx: AsyncAppContext,
3851 ) -> Result<proto::OpenBufferForSymbolResponse> {
3852 let peer_id = envelope.original_sender_id()?;
3853 let symbol = envelope
3854 .payload
3855 .symbol
3856 .ok_or_else(|| anyhow!("invalid symbol"))?;
3857 let symbol = this.read_with(&cx, |this, _| {
3858 let symbol = this.deserialize_symbol(symbol)?;
3859 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3860 if signature == symbol.signature {
3861 Ok(symbol)
3862 } else {
3863 Err(anyhow!("invalid symbol signature"))
3864 }
3865 })?;
3866 let buffer = this
3867 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3868 .await?;
3869
3870 Ok(proto::OpenBufferForSymbolResponse {
3871 buffer: Some(this.update(&mut cx, |this, cx| {
3872 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3873 })),
3874 })
3875 }
3876
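    // Computes a SHA-256 digest over the worktree id, the symbol's path, and this
    // project's random nonce. Peers echo this signature back when requesting a
    // symbol's buffer, which lets us verify the symbol originated from this project.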
3877 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3878 let mut hasher = Sha256::new();
3879 hasher.update(worktree_id.to_proto().to_be_bytes());
3880 hasher.update(path.to_string_lossy().as_bytes());
3881 hasher.update(self.nonce.to_be_bytes());
3882 hasher.finalize().as_slice().try_into().unwrap()
3883 }
3884
3885 async fn handle_open_buffer(
3886 this: ModelHandle<Self>,
3887 envelope: TypedEnvelope<proto::OpenBuffer>,
3888 _: Arc<Client>,
3889 mut cx: AsyncAppContext,
3890 ) -> Result<proto::OpenBufferResponse> {
3891 let peer_id = envelope.original_sender_id()?;
3892 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3893 let open_buffer = this.update(&mut cx, |this, cx| {
3894 this.open_buffer_for_path(
3895 ProjectPath {
3896 worktree_id,
3897 path: PathBuf::from(envelope.payload.path).into(),
3898 },
3899 cx,
3900 )
3901 });
3902
3903 let buffer = open_buffer.await?;
3904 this.update(&mut cx, |this, cx| {
3905 Ok(proto::OpenBufferResponse {
3906 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3907 })
3908 })
3909 }
3910
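    // Converts a `ProjectTransaction` into its proto form, ensuring every buffer it
    // touches has been serialized for the given peer.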
3911 fn serialize_project_transaction_for_peer(
3912 &mut self,
3913 project_transaction: ProjectTransaction,
3914 peer_id: PeerId,
3915 cx: &AppContext,
3916 ) -> proto::ProjectTransaction {
3917 let mut serialized_transaction = proto::ProjectTransaction {
3918 buffers: Default::default(),
3919 transactions: Default::default(),
3920 };
3921 for (buffer, transaction) in project_transaction.0 {
3922 serialized_transaction
3923 .buffers
3924 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3925 serialized_transaction
3926 .transactions
3927 .push(language::proto::serialize_transaction(&transaction));
3928 }
3929 serialized_transaction
3930 }
3931
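    // Reconstructs a `ProjectTransaction` received from a peer: deserialize each
    // buffer/transaction pair, wait for the transaction's edits to arrive, and
    // optionally push the transaction onto each buffer's undo history.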
3932 fn deserialize_project_transaction(
3933 &mut self,
3934 message: proto::ProjectTransaction,
3935 push_to_history: bool,
3936 cx: &mut ModelContext<Self>,
3937 ) -> Task<Result<ProjectTransaction>> {
3938 cx.spawn(|this, mut cx| async move {
3939 let mut project_transaction = ProjectTransaction::default();
3940 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3941 let buffer = this
3942 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3943 .await?;
3944 let transaction = language::proto::deserialize_transaction(transaction)?;
3945 project_transaction.0.insert(buffer, transaction);
3946 }
3947
3948 for (buffer, transaction) in &project_transaction.0 {
3949 buffer
3950 .update(&mut cx, |buffer, _| {
3951 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3952 })
3953 .await;
3954
3955 if push_to_history {
3956 buffer.update(&mut cx, |buffer, _| {
3957 buffer.push_transaction(transaction.clone(), Instant::now());
3958 });
3959 }
3960 }
3961
3962 Ok(project_transaction)
3963 })
3964 }
3965
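    // The first time a buffer is shared with a peer, send its full state; on
    // subsequent shares, send only its remote id.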
3966 fn serialize_buffer_for_peer(
3967 &mut self,
3968 buffer: &ModelHandle<Buffer>,
3969 peer_id: PeerId,
3970 cx: &AppContext,
3971 ) -> proto::Buffer {
3972 let buffer_id = buffer.read(cx).remote_id();
3973 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3974 if shared_buffers.insert(buffer_id) {
3975 proto::Buffer {
3976 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3977 }
3978 } else {
3979 proto::Buffer {
3980 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3981 }
3982 }
3983 }
3984
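    // Resolves a proto buffer to a local model. An id-only variant waits until a
    // buffer with that id is registered locally; a full-state variant constructs
    // the buffer, attaches its file (if any), and registers it with the project.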
3985 fn deserialize_buffer(
3986 &mut self,
3987 buffer: proto::Buffer,
3988 cx: &mut ModelContext<Self>,
3989 ) -> Task<Result<ModelHandle<Buffer>>> {
3990 let replica_id = self.replica_id();
3991
3992 let opened_buffer_tx = self.opened_buffer.0.clone();
3993 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3994 cx.spawn(|this, mut cx| async move {
3995 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3996 proto::buffer::Variant::Id(id) => {
3997 let buffer = loop {
3998 let buffer = this.read_with(&cx, |this, cx| {
3999 this.opened_buffers
4000 .get(&id)
4001 .and_then(|buffer| buffer.upgrade(cx))
4002 });
4003 if let Some(buffer) = buffer {
4004 break buffer;
4005 }
4006 opened_buffer_rx
4007 .next()
4008 .await
4009 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4010 };
4011 Ok(buffer)
4012 }
4013 proto::buffer::Variant::State(mut buffer) => {
4014 let mut buffer_worktree = None;
4015 let mut buffer_file = None;
4016 if let Some(file) = buffer.file.take() {
4017 this.read_with(&cx, |this, cx| {
4018 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4019 let worktree =
4020 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4021 anyhow!("no worktree found for id {}", file.worktree_id)
4022 })?;
4023 buffer_file =
4024 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4025 as Box<dyn language::File>);
4026 buffer_worktree = Some(worktree);
4027 Ok::<_, anyhow::Error>(())
4028 })?;
4029 }
4030
4031 let buffer = cx.add_model(|cx| {
4032 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4033 });
4034
4035 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4036
4037 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4038 Ok(buffer)
4039 }
4040 }
4041 })
4042 }
4043
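    // Reconstructs a `Symbol` from its proto representation, using the symbol's
    // language (if available) to compute a display label.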
4044 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4045 let language = self
4046 .languages
4047 .get_language(&serialized_symbol.language_name);
4048 let start = serialized_symbol
4049 .start
4050 .ok_or_else(|| anyhow!("invalid start"))?;
4051 let end = serialized_symbol
4052 .end
4053 .ok_or_else(|| anyhow!("invalid end"))?;
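        // Note: the raw kind value is transmuted directly from its wire
        // representation; this relies on it matching the layout of the
        // in-memory symbol kind type.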
4054 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4055 Ok(Symbol {
4056 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
4057 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
4058 language_name: serialized_symbol.language_name.clone(),
4059 label: language
4060 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4061 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4062 name: serialized_symbol.name,
4063 path: PathBuf::from(serialized_symbol.path),
4064 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4065 kind,
4066 signature: serialized_symbol
4067 .signature
4068 .try_into()
4069 .map_err(|_| anyhow!("invalid signature"))?,
4070 })
4071 }
4072
4073 async fn handle_buffer_saved(
4074 this: ModelHandle<Self>,
4075 envelope: TypedEnvelope<proto::BufferSaved>,
4076 _: Arc<Client>,
4077 mut cx: AsyncAppContext,
4078 ) -> Result<()> {
4079 let version = deserialize_version(envelope.payload.version);
4080 let mtime = envelope
4081 .payload
4082 .mtime
4083 .ok_or_else(|| anyhow!("missing mtime"))?
4084 .into();
4085
4086 this.update(&mut cx, |this, cx| {
4087 let buffer = this
4088 .opened_buffers
4089 .get(&envelope.payload.buffer_id)
4090 .and_then(|buffer| buffer.upgrade(cx));
4091 if let Some(buffer) = buffer {
4092 buffer.update(cx, |buffer, cx| {
4093 buffer.did_save(version, mtime, None, cx);
4094 });
4095 }
4096 Ok(())
4097 })
4098 }
4099
4100 async fn handle_buffer_reloaded(
4101 this: ModelHandle<Self>,
4102 envelope: TypedEnvelope<proto::BufferReloaded>,
4103 _: Arc<Client>,
4104 mut cx: AsyncAppContext,
4105 ) -> Result<()> {
4106 let payload = envelope.payload.clone();
4107 let version = deserialize_version(payload.version);
4108 let mtime = payload
4109 .mtime
4110 .ok_or_else(|| anyhow!("missing mtime"))?
4111 .into();
4112 this.update(&mut cx, |this, cx| {
4113 let buffer = this
4114 .opened_buffers
4115 .get(&payload.buffer_id)
4116 .and_then(|buffer| buffer.upgrade(cx));
4117 if let Some(buffer) = buffer {
4118 buffer.update(cx, |buffer, cx| {
4119 buffer.did_reload(version, mtime, cx);
4120 });
4121 }
4122 Ok(())
4123 })
4124 }
4125
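    // Fuzzy-matches `query` against the paths of all visible worktrees, running the
    // matching on the background executor.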
4126 pub fn match_paths<'a>(
4127 &self,
4128 query: &'a str,
4129 include_ignored: bool,
4130 smart_case: bool,
4131 max_results: usize,
4132 cancel_flag: &'a AtomicBool,
4133 cx: &AppContext,
4134 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4135 let worktrees = self
4136 .worktrees(cx)
4137 .filter(|worktree| worktree.read(cx).is_visible())
4138 .collect::<Vec<_>>();
4139 let include_root_name = worktrees.len() > 1;
4140 let candidate_sets = worktrees
4141 .into_iter()
4142 .map(|worktree| CandidateSet {
4143 snapshot: worktree.read(cx).snapshot(),
4144 include_ignored,
4145 include_root_name,
4146 })
4147 .collect::<Vec<_>>();
4148
4149 let background = cx.background().clone();
4150 async move {
4151 fuzzy::match_paths(
4152 candidate_sets.as_slice(),
4153 query,
4154 smart_case,
4155 max_results,
4156 cancel_flag,
4157 background,
4158 )
4159 .await
4160 }
4161 }
4162
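    // Converts LSP text edits (expressed against the snapshot identified by
    // `version`) into anchor ranges and replacement text for the buffer, merging
    // adjacent edits and diffing multi-line replacements to keep the edited
    // regions minimal.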
4163 fn edits_from_lsp(
4164 &mut self,
4165 buffer: &ModelHandle<Buffer>,
4166 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4167 version: Option<i32>,
4168 cx: &mut ModelContext<Self>,
4169 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4170 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4171 cx.background().spawn(async move {
4172 let snapshot = snapshot?;
4173 let mut lsp_edits = lsp_edits
4174 .into_iter()
4175 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4176 .peekable();
4177
4178 let mut edits = Vec::new();
4179 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4180 // Combine any LSP edits that are adjacent.
4181 //
4182 // Also, combine LSP edits that are separated from each other by only
4183 // a newline. This is important because for some code actions,
4184 // Rust-analyzer rewrites the entire buffer via a series of edits that
4185 // are separated by unchanged newline characters.
4186 //
4187 // In order for the diffing logic below to work properly, any edits that
4188 // cancel each other out must be combined into one.
4189 while let Some((next_range, next_text)) = lsp_edits.peek() {
4190 if next_range.start > range.end {
4191 if next_range.start.row > range.end.row + 1
4192 || next_range.start.column > 0
4193 || snapshot.clip_point_utf16(
4194 PointUtf16::new(range.end.row, u32::MAX),
4195 Bias::Left,
4196 ) > range.end
4197 {
4198 break;
4199 }
4200 new_text.push('\n');
4201 }
4202 range.end = next_range.end;
4203 new_text.push_str(&next_text);
4204 lsp_edits.next();
4205 }
4206
4207 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4208 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4209 {
4210 return Err(anyhow!("invalid edits received from language server"));
4211 }
4212
4213 // For multiline edits, perform a diff of the old and new text so that
4214 // we can identify the changes more precisely, preserving the locations
4215 // of any anchors positioned in the unchanged regions.
4216 if range.end.row > range.start.row {
4217 let mut offset = range.start.to_offset(&snapshot);
4218 let old_text = snapshot.text_for_range(range).collect::<String>();
4219
4220 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4221 let mut moved_since_edit = true;
4222 for change in diff.iter_all_changes() {
4223 let tag = change.tag();
4224 let value = change.value();
4225 match tag {
4226 ChangeTag::Equal => {
4227 offset += value.len();
4228 moved_since_edit = true;
4229 }
4230 ChangeTag::Delete => {
4231 let start = snapshot.anchor_after(offset);
4232 let end = snapshot.anchor_before(offset + value.len());
4233 if moved_since_edit {
4234 edits.push((start..end, String::new()));
4235 } else {
4236 edits.last_mut().unwrap().0.end = end;
4237 }
4238 offset += value.len();
4239 moved_since_edit = false;
4240 }
4241 ChangeTag::Insert => {
4242 if moved_since_edit {
4243 let anchor = snapshot.anchor_after(offset);
4244 edits.push((anchor.clone()..anchor, value.to_string()));
4245 } else {
4246 edits.last_mut().unwrap().1.push_str(value);
4247 }
4248 moved_since_edit = false;
4249 }
4250 }
4251 }
4252 } else if range.end == range.start {
4253 let anchor = snapshot.anchor_after(range.start);
4254 edits.push((anchor.clone()..anchor, new_text));
4255 } else {
4256 let edit_start = snapshot.anchor_after(range.start);
4257 let edit_end = snapshot.anchor_before(range.end);
4258 edits.push((edit_start..edit_end, new_text));
4259 }
4260 }
4261
4262 Ok(edits)
4263 })
4264 }
4265
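    // Returns the text snapshot corresponding to the given LSP document version,
    // pruning snapshots that are too old to be requested again. With no version,
    // returns the buffer's current snapshot.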
4266 fn buffer_snapshot_for_lsp_version(
4267 &mut self,
4268 buffer: &ModelHandle<Buffer>,
4269 version: Option<i32>,
4270 cx: &AppContext,
4271 ) -> Result<TextBufferSnapshot> {
4272 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4273
4274 if let Some(version) = version {
4275 let buffer_id = buffer.read(cx).remote_id();
4276 let snapshots = self
4277 .buffer_snapshots
4278 .get_mut(&buffer_id)
4279 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4280 let mut found_snapshot = None;
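            // Drop snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
            // the requested one, and grab the snapshot that matches it exactly.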
4281 snapshots.retain(|(snapshot_version, snapshot)| {
4282 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4283 false
4284 } else {
4285 if *snapshot_version == version {
4286 found_snapshot = Some(snapshot.clone());
4287 }
4288 true
4289 }
4290 });
4291
4292 found_snapshot.ok_or_else(|| {
4293 anyhow!(
4294 "snapshot not found for buffer {} at version {}",
4295 buffer_id,
4296 version
4297 )
4298 })
4299 } else {
            Ok(buffer.read(cx).text_snapshot())
4301 }
4302 }
4303
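    // Looks up the running language server keyed by the buffer's worktree and
    // language, if both are known.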
4304 fn language_server_for_buffer(
4305 &self,
4306 buffer: &Buffer,
4307 cx: &AppContext,
4308 ) -> Option<&Arc<LanguageServer>> {
4309 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4310 let worktree_id = file.worktree_id(cx);
4311 self.language_servers.get(&(worktree_id, language.name()))
4312 } else {
4313 None
4314 }
4315 }
4316}
4317
4318impl WorktreeHandle {
4319 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4320 match self {
4321 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4322 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4323 }
4324 }
4325}
4326
4327impl OpenBuffer {
4328 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4329 match self {
4330 OpenBuffer::Strong(handle) => Some(handle.clone()),
4331 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4332 OpenBuffer::Loading(_) => None,
4333 }
4334 }
4335}
4336
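// Adapts a worktree snapshot into a set of path-match candidates for the fuzzy matcher.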
4337struct CandidateSet {
4338 snapshot: Snapshot,
4339 include_ignored: bool,
4340 include_root_name: bool,
4341}
4342
4343impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4344 type Candidates = CandidateSetIter<'a>;
4345
4346 fn id(&self) -> usize {
4347 self.snapshot.id().to_usize()
4348 }
4349
4350 fn len(&self) -> usize {
4351 if self.include_ignored {
4352 self.snapshot.file_count()
4353 } else {
4354 self.snapshot.visible_file_count()
4355 }
4356 }
4357
4358 fn prefix(&self) -> Arc<str> {
4359 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4360 self.snapshot.root_name().into()
4361 } else if self.include_root_name {
4362 format!("{}/", self.snapshot.root_name()).into()
4363 } else {
4364 "".into()
4365 }
4366 }
4367
4368 fn candidates(&'a self, start: usize) -> Self::Candidates {
4369 CandidateSetIter {
4370 traversal: self.snapshot.files(self.include_ignored, start),
4371 }
4372 }
4373}
4374
4375struct CandidateSetIter<'a> {
4376 traversal: Traversal<'a>,
4377}
4378
4379impl<'a> Iterator for CandidateSetIter<'a> {
4380 type Item = PathMatchCandidate<'a>;
4381
4382 fn next(&mut self) -> Option<Self::Item> {
4383 self.traversal.next().map(|entry| {
4384 if let EntryKind::File(char_bag) = entry.kind {
4385 PathMatchCandidate {
4386 path: &entry.path,
4387 char_bag,
4388 }
4389 } else {
4390 unreachable!()
4391 }
4392 })
4393 }
4394}
4395
4396impl Entity for Project {
4397 type Event = Event;
4398
4399 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4400 match &self.client_state {
4401 ProjectClientState::Local { remote_id_rx, .. } => {
4402 if let Some(project_id) = *remote_id_rx.borrow() {
4403 self.client
4404 .send(proto::UnregisterProject { project_id })
4405 .log_err();
4406 }
4407 }
4408 ProjectClientState::Remote { remote_id, .. } => {
4409 self.client
4410 .send(proto::LeaveProject {
4411 project_id: *remote_id,
4412 })
4413 .log_err();
4414 }
4415 }
4416 }
4417
4418 fn app_will_quit(
4419 &mut self,
4420 _: &mut MutableAppContext,
4421 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4422 let shutdown_futures = self
4423 .language_servers
4424 .drain()
4425 .filter_map(|(_, server)| server.shutdown())
4426 .collect::<Vec<_>>();
4427 Some(
4428 async move {
4429 futures::future::join_all(shutdown_futures).await;
4430 }
4431 .boxed(),
4432 )
4433 }
4434}
4435
4436impl Collaborator {
4437 fn from_proto(
4438 message: proto::Collaborator,
4439 user_store: &ModelHandle<UserStore>,
4440 cx: &mut AsyncAppContext,
4441 ) -> impl Future<Output = Result<Self>> {
4442 let user = user_store.update(cx, |user_store, cx| {
4443 user_store.fetch_user(message.user_id, cx)
4444 });
4445
4446 async move {
4447 Ok(Self {
4448 peer_id: PeerId(message.peer_id),
4449 user: user.await?,
4450 replica_id: message.replica_id as ReplicaId,
4451 })
4452 }
4453 }
4454}
4455
4456impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4457 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4458 Self {
4459 worktree_id,
4460 path: path.as_ref().into(),
4461 }
4462 }
4463}
4464
4465impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4466 fn from(options: lsp::CreateFileOptions) -> Self {
4467 Self {
4468 overwrite: options.overwrite.unwrap_or(false),
4469 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4470 }
4471 }
4472}
4473
4474impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4475 fn from(options: lsp::RenameFileOptions) -> Self {
4476 Self {
4477 overwrite: options.overwrite.unwrap_or(false),
4478 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4479 }
4480 }
4481}
4482
4483impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4484 fn from(options: lsp::DeleteFileOptions) -> Self {
4485 Self {
4486 recursive: options.recursive.unwrap_or(false),
4487 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4488 }
4489 }
4490}
4491
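// Converts a `Symbol` into its proto representation.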
4492fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4493 proto::Symbol {
4494 source_worktree_id: symbol.source_worktree_id.to_proto(),
4495 worktree_id: symbol.worktree_id.to_proto(),
4496 language_name: symbol.language_name.clone(),
4497 name: symbol.name.clone(),
4498 kind: unsafe { mem::transmute(symbol.kind) },
4499 path: symbol.path.to_string_lossy().to_string(),
4500 start: Some(proto::Point {
4501 row: symbol.range.start.row,
4502 column: symbol.range.start.column,
4503 }),
4504 end: Some(proto::Point {
4505 row: symbol.range.end.row,
4506 column: symbol.range.end.column,
4507 }),
4508 signature: symbol.signature.to_vec(),
4509 }
4510}
4511
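// Computes `path` relative to `base`, inserting `..` components where needed.
// For example, relativize_path("a/b", "a/c/d") yields "../c/d".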
4512fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4513 let mut path_components = path.components();
4514 let mut base_components = base.components();
4515 let mut components: Vec<Component> = Vec::new();
4516 loop {
4517 match (path_components.next(), base_components.next()) {
4518 (None, None) => break,
4519 (Some(a), None) => {
4520 components.push(a);
4521 components.extend(path_components.by_ref());
4522 break;
4523 }
4524 (None, _) => components.push(Component::ParentDir),
4525 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4526 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4527 (Some(a), Some(_)) => {
4528 components.push(Component::ParentDir);
4529 for _ in base_components {
4530 components.push(Component::ParentDir);
4531 }
4532 components.push(a);
4533 components.extend(path_components.by_ref());
4534 break;
4535 }
4536 }
4537 }
4538 components.iter().map(|c| c.as_os_str()).collect()
4539}
4540
4541#[cfg(test)]
4542mod tests {
4543 use super::{Event, *};
4544 use fs::RealFs;
4545 use futures::StreamExt;
4546 use gpui::test::subscribe;
4547 use language::{
4548 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4549 ToPoint,
4550 };
4551 use lsp::Url;
4552 use serde_json::json;
4553 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4554 use unindent::Unindent as _;
4555 use util::test::temp_tree;
4556 use worktree::WorktreeHandle as _;
4557
4558 #[gpui::test]
4559 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4560 let dir = temp_tree(json!({
4561 "root": {
4562 "apple": "",
4563 "banana": {
4564 "carrot": {
4565 "date": "",
4566 "endive": "",
4567 }
4568 },
4569 "fennel": {
4570 "grape": "",
4571 }
4572 }
4573 }));
4574
4575 let root_link_path = dir.path().join("root_link");
4576 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4577 unix::fs::symlink(
4578 &dir.path().join("root/fennel"),
4579 &dir.path().join("root/finnochio"),
4580 )
4581 .unwrap();
4582
4583 let project = Project::test(Arc::new(RealFs), cx);
4584
4585 let (tree, _) = project
4586 .update(cx, |project, cx| {
4587 project.find_or_create_local_worktree(&root_link_path, true, cx)
4588 })
4589 .await
4590 .unwrap();
4591
4592 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4593 .await;
4594 cx.read(|cx| {
4595 let tree = tree.read(cx);
4596 assert_eq!(tree.file_count(), 5);
4597 assert_eq!(
4598 tree.inode_for_path("fennel/grape"),
4599 tree.inode_for_path("finnochio/grape")
4600 );
4601 });
4602
4603 let cancel_flag = Default::default();
4604 let results = project
4605 .read_with(cx, |project, cx| {
4606 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4607 })
4608 .await;
4609 assert_eq!(
4610 results
4611 .into_iter()
4612 .map(|result| result.path)
4613 .collect::<Vec<Arc<Path>>>(),
4614 vec![
4615 PathBuf::from("banana/carrot/date").into(),
4616 PathBuf::from("banana/carrot/endive").into(),
4617 ]
4618 );
4619 }
4620
4621 #[gpui::test]
4622 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4623 cx.foreground().forbid_parking();
4624
4625 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4626 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4627 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4628 completion_provider: Some(lsp::CompletionOptions {
4629 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4630 ..Default::default()
4631 }),
4632 ..Default::default()
4633 });
4634 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4635 completion_provider: Some(lsp::CompletionOptions {
4636 trigger_characters: Some(vec![":".to_string()]),
4637 ..Default::default()
4638 }),
4639 ..Default::default()
4640 });
4641
4642 let rust_language = Arc::new(Language::new(
4643 LanguageConfig {
4644 name: "Rust".into(),
4645 path_suffixes: vec!["rs".to_string()],
4646 language_server: Some(rust_lsp_config),
4647 ..Default::default()
4648 },
4649 Some(tree_sitter_rust::language()),
4650 ));
4651 let json_language = Arc::new(Language::new(
4652 LanguageConfig {
4653 name: "JSON".into(),
4654 path_suffixes: vec!["json".to_string()],
4655 language_server: Some(json_lsp_config),
4656 ..Default::default()
4657 },
4658 None,
4659 ));
4660
4661 let fs = FakeFs::new(cx.background());
4662 fs.insert_tree(
4663 "/the-root",
4664 json!({
4665 "test.rs": "const A: i32 = 1;",
4666 "test2.rs": "",
4667 "Cargo.toml": "a = 1",
4668 "package.json": "{\"a\": 1}",
4669 }),
4670 )
4671 .await;
4672
4673 let project = Project::test(fs, cx);
4674 project.update(cx, |project, _| {
4675 project.languages.add(rust_language);
4676 project.languages.add(json_language);
4677 });
4678
4679 let worktree_id = project
4680 .update(cx, |project, cx| {
4681 project.find_or_create_local_worktree("/the-root", true, cx)
4682 })
4683 .await
4684 .unwrap()
4685 .0
4686 .read_with(cx, |tree, _| tree.id());
4687
4688 // Open a buffer without an associated language server.
4689 let toml_buffer = project
4690 .update(cx, |project, cx| {
4691 project.open_buffer_for_path((worktree_id, "Cargo.toml"), cx)
4692 })
4693 .await
4694 .unwrap();
4695
4696 // Open a buffer with an associated language server.
4697 let rust_buffer = project
4698 .update(cx, |project, cx| {
4699 project.open_buffer_for_path((worktree_id, "test.rs"), cx)
4700 })
4701 .await
4702 .unwrap();
4703
4704 // A server is started up, and it is notified about Rust files.
4705 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4706 assert_eq!(
4707 fake_rust_server
4708 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4709 .await
4710 .text_document,
4711 lsp::TextDocumentItem {
4712 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4713 version: 0,
4714 text: "const A: i32 = 1;".to_string(),
4715 language_id: Default::default()
4716 }
4717 );
4718
4719 // The buffer is configured based on the language server's capabilities.
4720 rust_buffer.read_with(cx, |buffer, _| {
4721 assert_eq!(
4722 buffer.completion_triggers(),
4723 &[".".to_string(), "::".to_string()]
4724 );
4725 });
4726 toml_buffer.read_with(cx, |buffer, _| {
4727 assert!(buffer.completion_triggers().is_empty());
4728 });
4729
4730 // Edit a buffer. The changes are reported to the language server.
4731 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4732 assert_eq!(
4733 fake_rust_server
4734 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4735 .await
4736 .text_document,
4737 lsp::VersionedTextDocumentIdentifier::new(
4738 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4739 1
4740 )
4741 );
4742
4743 // Open a third buffer with a different associated language server.
4744 let json_buffer = project
4745 .update(cx, |project, cx| {
4746 project.open_buffer_for_path((worktree_id, "package.json"), cx)
4747 })
4748 .await
4749 .unwrap();
4750
        // Another language server is started up, and it is notified about
        // the open JSON buffer.
4753 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4754 assert_eq!(
4755 fake_json_server
4756 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4757 .await
4758 .text_document,
4759 lsp::TextDocumentItem {
4760 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4761 version: 0,
4762 text: "{\"a\": 1}".to_string(),
4763 language_id: Default::default()
4764 }
4765 );
4766
4767 // This buffer is configured based on the second language server's
4768 // capabilities.
4769 json_buffer.read_with(cx, |buffer, _| {
4770 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4771 });
4772
4773 // When opening another buffer whose language server is already running,
4774 // it is also configured based on the existing language server's capabilities.
4775 let rust_buffer2 = project
4776 .update(cx, |project, cx| {
4777 project.open_buffer_for_path((worktree_id, "test2.rs"), cx)
4778 })
4779 .await
4780 .unwrap();
4781 rust_buffer2.read_with(cx, |buffer, _| {
4782 assert_eq!(
4783 buffer.completion_triggers(),
4784 &[".".to_string(), "::".to_string()]
4785 );
4786 });
4787
4788 // Changes are reported only to servers matching the buffer's language.
4789 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4790 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4791 assert_eq!(
4792 fake_rust_server
4793 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4794 .await
4795 .text_document,
4796 lsp::VersionedTextDocumentIdentifier::new(
4797 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4798 1
4799 )
4800 );
4801
4802 // Save notifications are reported to all servers.
4803 toml_buffer
4804 .update(cx, |buffer, cx| buffer.save(cx))
4805 .await
4806 .unwrap();
4807 assert_eq!(
4808 fake_rust_server
4809 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4810 .await
4811 .text_document,
4812 lsp::TextDocumentIdentifier::new(
4813 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4814 )
4815 );
4816 assert_eq!(
4817 fake_json_server
4818 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4819 .await
4820 .text_document,
4821 lsp::TextDocumentIdentifier::new(
4822 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4823 )
4824 );
4825
4826 // Close notifications are reported only to servers matching the buffer's language.
4827 cx.update(|_| drop(json_buffer));
4828 let close_message = lsp::DidCloseTextDocumentParams {
4829 text_document: lsp::TextDocumentIdentifier::new(
4830 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4831 ),
4832 };
4833 assert_eq!(
4834 fake_json_server
4835 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4836 .await,
4837 close_message,
4838 );
4839 }
4840
4841 #[gpui::test]
4842 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4843 cx.foreground().forbid_parking();
4844
4845 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4846 let progress_token = language_server_config
4847 .disk_based_diagnostics_progress_token
4848 .clone()
4849 .unwrap();
4850
4851 let language = Arc::new(Language::new(
4852 LanguageConfig {
4853 name: "Rust".into(),
4854 path_suffixes: vec!["rs".to_string()],
4855 language_server: Some(language_server_config),
4856 ..Default::default()
4857 },
4858 Some(tree_sitter_rust::language()),
4859 ));
4860
4861 let fs = FakeFs::new(cx.background());
4862 fs.insert_tree(
4863 "/dir",
4864 json!({
4865 "a.rs": "fn a() { A }",
4866 "b.rs": "const y: i32 = 1",
4867 }),
4868 )
4869 .await;
4870
4871 let project = Project::test(fs, cx);
4872 project.update(cx, |project, _| project.languages.add(language));
4873
4874 let (tree, _) = project
4875 .update(cx, |project, cx| {
4876 project.find_or_create_local_worktree("/dir", true, cx)
4877 })
4878 .await
4879 .unwrap();
4880 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4881
4882 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4883 .await;
4884
        // Cause the worktree to start the fake language server
4886 let _buffer = project
4887 .update(cx, |project, cx| {
4888 project.open_buffer_for_path((worktree_id, Path::new("b.rs")), cx)
4889 })
4890 .await
4891 .unwrap();
4892
4893 let mut events = subscribe(&project, cx);
4894
4895 let mut fake_server = fake_servers.next().await.unwrap();
4896 fake_server.start_progress(&progress_token).await;
4897 assert_eq!(
4898 events.next().await.unwrap(),
4899 Event::DiskBasedDiagnosticsStarted
4900 );
4901
4902 fake_server.start_progress(&progress_token).await;
4903 fake_server.end_progress(&progress_token).await;
4904 fake_server.start_progress(&progress_token).await;
4905
4906 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4907 lsp::PublishDiagnosticsParams {
4908 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4909 version: None,
4910 diagnostics: vec![lsp::Diagnostic {
4911 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4912 severity: Some(lsp::DiagnosticSeverity::ERROR),
4913 message: "undefined variable 'A'".to_string(),
4914 ..Default::default()
4915 }],
4916 },
4917 );
4918 assert_eq!(
4919 events.next().await.unwrap(),
4920 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4921 );
4922
4923 fake_server.end_progress(&progress_token).await;
4924 fake_server.end_progress(&progress_token).await;
4925 assert_eq!(
4926 events.next().await.unwrap(),
4927 Event::DiskBasedDiagnosticsUpdated
4928 );
4929 assert_eq!(
4930 events.next().await.unwrap(),
4931 Event::DiskBasedDiagnosticsFinished
4932 );
4933
4934 let buffer = project
4935 .update(cx, |p, cx| {
4936 p.open_buffer_for_path((worktree_id, "a.rs"), cx)
4937 })
4938 .await
4939 .unwrap();
4940
4941 buffer.read_with(cx, |buffer, _| {
4942 let snapshot = buffer.snapshot();
4943 let diagnostics = snapshot
4944 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4945 .collect::<Vec<_>>();
4946 assert_eq!(
4947 diagnostics,
4948 &[DiagnosticEntry {
4949 range: Point::new(0, 9)..Point::new(0, 10),
4950 diagnostic: Diagnostic {
4951 severity: lsp::DiagnosticSeverity::ERROR,
4952 message: "undefined variable 'A'".to_string(),
4953 group_id: 0,
4954 is_primary: true,
4955 ..Default::default()
4956 }
4957 }]
4958 )
4959 });
4960 }
4961
4962 #[gpui::test]
4963 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4964 cx.foreground().forbid_parking();
4965
4966 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4967 lsp_config
4968 .disk_based_diagnostic_sources
4969 .insert("disk".to_string());
4970 let language = Arc::new(Language::new(
4971 LanguageConfig {
4972 name: "Rust".into(),
4973 path_suffixes: vec!["rs".to_string()],
4974 language_server: Some(lsp_config),
4975 ..Default::default()
4976 },
4977 Some(tree_sitter_rust::language()),
4978 ));
4979
4980 let text = "
4981 fn a() { A }
4982 fn b() { BB }
4983 fn c() { CCC }
4984 "
4985 .unindent();
4986
4987 let fs = FakeFs::new(cx.background());
4988 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4989
4990 let project = Project::test(fs, cx);
4991 project.update(cx, |project, _| project.languages.add(language));
4992
4993 let worktree_id = project
4994 .update(cx, |project, cx| {
4995 project.find_or_create_local_worktree("/dir", true, cx)
4996 })
4997 .await
4998 .unwrap()
4999 .0
5000 .read_with(cx, |tree, _| tree.id());
5001
5002 let buffer = project
5003 .update(cx, |project, cx| {
5004 project.open_buffer_for_path((worktree_id, "a.rs"), cx)
5005 })
5006 .await
5007 .unwrap();
5008
5009 let mut fake_server = fake_servers.next().await.unwrap();
5010 let open_notification = fake_server
5011 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5012 .await;
5013
5014 // Edit the buffer, moving the content down
5015 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5016 let change_notification_1 = fake_server
5017 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5018 .await;
5019 assert!(
5020 change_notification_1.text_document.version > open_notification.text_document.version
5021 );
5022
5023 // Report some diagnostics for the initial version of the buffer
5024 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5025 lsp::PublishDiagnosticsParams {
5026 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5027 version: Some(open_notification.text_document.version),
5028 diagnostics: vec![
5029 lsp::Diagnostic {
5030 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5031 severity: Some(DiagnosticSeverity::ERROR),
5032 message: "undefined variable 'A'".to_string(),
5033 source: Some("disk".to_string()),
5034 ..Default::default()
5035 },
5036 lsp::Diagnostic {
5037 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5038 severity: Some(DiagnosticSeverity::ERROR),
5039 message: "undefined variable 'BB'".to_string(),
5040 source: Some("disk".to_string()),
5041 ..Default::default()
5042 },
5043 lsp::Diagnostic {
5044 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5045 severity: Some(DiagnosticSeverity::ERROR),
5046 source: Some("disk".to_string()),
5047 message: "undefined variable 'CCC'".to_string(),
5048 ..Default::default()
5049 },
5050 ],
5051 },
5052 );
5053
5054 // The diagnostics have moved down since they were created.
5055 buffer.next_notification(cx).await;
5056 buffer.read_with(cx, |buffer, _| {
5057 assert_eq!(
5058 buffer
5059 .snapshot()
5060 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5061 .collect::<Vec<_>>(),
5062 &[
5063 DiagnosticEntry {
5064 range: Point::new(3, 9)..Point::new(3, 11),
5065 diagnostic: Diagnostic {
5066 severity: DiagnosticSeverity::ERROR,
5067 message: "undefined variable 'BB'".to_string(),
5068 is_disk_based: true,
5069 group_id: 1,
5070 is_primary: true,
5071 ..Default::default()
5072 },
5073 },
5074 DiagnosticEntry {
5075 range: Point::new(4, 9)..Point::new(4, 12),
5076 diagnostic: Diagnostic {
5077 severity: DiagnosticSeverity::ERROR,
5078 message: "undefined variable 'CCC'".to_string(),
5079 is_disk_based: true,
5080 group_id: 2,
5081 is_primary: true,
5082 ..Default::default()
5083 }
5084 }
5085 ]
5086 );
5087 assert_eq!(
5088 chunks_with_diagnostics(buffer, 0..buffer.len()),
5089 [
5090 ("\n\nfn a() { ".to_string(), None),
5091 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5092 (" }\nfn b() { ".to_string(), None),
5093 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5094 (" }\nfn c() { ".to_string(), None),
5095 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5096 (" }\n".to_string(), None),
5097 ]
5098 );
5099 assert_eq!(
5100 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5101 [
5102 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5103 (" }\nfn c() { ".to_string(), None),
5104 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5105 ]
5106 );
5107 });
5108
5109 // Ensure overlapping diagnostics are highlighted correctly.
5110 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5111 lsp::PublishDiagnosticsParams {
5112 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5113 version: Some(open_notification.text_document.version),
5114 diagnostics: vec![
5115 lsp::Diagnostic {
5116 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5117 severity: Some(DiagnosticSeverity::ERROR),
5118 message: "undefined variable 'A'".to_string(),
5119 source: Some("disk".to_string()),
5120 ..Default::default()
5121 },
5122 lsp::Diagnostic {
5123 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5124 severity: Some(DiagnosticSeverity::WARNING),
5125 message: "unreachable statement".to_string(),
5126 source: Some("disk".to_string()),
5127 ..Default::default()
5128 },
5129 ],
5130 },
5131 );
5132
5133 buffer.next_notification(cx).await;
5134 buffer.read_with(cx, |buffer, _| {
5135 assert_eq!(
5136 buffer
5137 .snapshot()
5138 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5139 .collect::<Vec<_>>(),
5140 &[
5141 DiagnosticEntry {
5142 range: Point::new(2, 9)..Point::new(2, 12),
5143 diagnostic: Diagnostic {
5144 severity: DiagnosticSeverity::WARNING,
5145 message: "unreachable statement".to_string(),
5146 is_disk_based: true,
5147 group_id: 1,
5148 is_primary: true,
5149 ..Default::default()
5150 }
5151 },
5152 DiagnosticEntry {
5153 range: Point::new(2, 9)..Point::new(2, 10),
5154 diagnostic: Diagnostic {
5155 severity: DiagnosticSeverity::ERROR,
5156 message: "undefined variable 'A'".to_string(),
5157 is_disk_based: true,
5158 group_id: 0,
5159 is_primary: true,
5160 ..Default::default()
5161 },
5162 }
5163 ]
5164 );
5165 assert_eq!(
5166 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5167 [
5168 ("fn a() { ".to_string(), None),
5169 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5170 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5171 ("\n".to_string(), None),
5172 ]
5173 );
5174 assert_eq!(
5175 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5176 [
5177 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5178 ("\n".to_string(), None),
5179 ]
5180 );
5181 });
5182
5183 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5184 // changes since the last save.
5185 buffer.update(cx, |buffer, cx| {
5186 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5187 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5188 });
5189 let change_notification_2 =
5190 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5191 assert!(
5192 change_notification_2.await.text_document.version
5193 > change_notification_1.text_document.version
5194 );
5195
5196 // Handle out-of-order diagnostics
5197 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5198 lsp::PublishDiagnosticsParams {
5199 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5200 version: Some(open_notification.text_document.version),
5201 diagnostics: vec![
5202 lsp::Diagnostic {
5203 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5204 severity: Some(DiagnosticSeverity::ERROR),
5205 message: "undefined variable 'BB'".to_string(),
5206 source: Some("disk".to_string()),
5207 ..Default::default()
5208 },
5209 lsp::Diagnostic {
5210 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5211 severity: Some(DiagnosticSeverity::WARNING),
5212 message: "undefined variable 'A'".to_string(),
5213 source: Some("disk".to_string()),
5214 ..Default::default()
5215 },
5216 ],
5217 },
5218 );
5219
5220 buffer.next_notification(cx).await;
5221 buffer.read_with(cx, |buffer, _| {
5222 assert_eq!(
5223 buffer
5224 .snapshot()
5225 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5226 .collect::<Vec<_>>(),
5227 &[
5228 DiagnosticEntry {
5229 range: Point::new(2, 21)..Point::new(2, 22),
5230 diagnostic: Diagnostic {
5231 severity: DiagnosticSeverity::WARNING,
5232 message: "undefined variable 'A'".to_string(),
5233 is_disk_based: true,
5234 group_id: 1,
5235 is_primary: true,
5236 ..Default::default()
5237 }
5238 },
5239 DiagnosticEntry {
5240 range: Point::new(3, 9)..Point::new(3, 11),
5241 diagnostic: Diagnostic {
5242 severity: DiagnosticSeverity::ERROR,
5243 message: "undefined variable 'BB'".to_string(),
5244 is_disk_based: true,
5245 group_id: 0,
5246 is_primary: true,
5247 ..Default::default()
5248 },
5249 }
5250 ]
5251 );
5252 });
5253 }
5254
5255 #[gpui::test]
5256 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5257 cx.foreground().forbid_parking();
5258
5259 let text = concat!(
5260 "let one = ;\n", //
5261 "let two = \n",
5262 "let three = 3;\n",
5263 );
5264
5265 let fs = FakeFs::new(cx.background());
5266 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5267
5268 let project = Project::test(fs, cx);
5269 let worktree_id = project
5270 .update(cx, |project, cx| {
5271 project.find_or_create_local_worktree("/dir", true, cx)
5272 })
5273 .await
5274 .unwrap()
5275 .0
5276 .read_with(cx, |tree, _| tree.id());
5277
5278 let buffer = project
5279 .update(cx, |project, cx| {
5280 project.open_buffer_for_path((worktree_id, "a.rs"), cx)
5281 })
5282 .await
5283 .unwrap();
5284
5285 project.update(cx, |project, cx| {
5286 project
5287 .update_buffer_diagnostics(
5288 &buffer,
5289 vec![
5290 DiagnosticEntry {
5291 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5292 diagnostic: Diagnostic {
5293 severity: DiagnosticSeverity::ERROR,
5294 message: "syntax error 1".to_string(),
5295 ..Default::default()
5296 },
5297 },
5298 DiagnosticEntry {
5299 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5300 diagnostic: Diagnostic {
5301 severity: DiagnosticSeverity::ERROR,
5302 message: "syntax error 2".to_string(),
5303 ..Default::default()
5304 },
5305 },
5306 ],
5307 None,
5308 cx,
5309 )
5310 .unwrap();
5311 });
5312
5313 // An empty range is extended forward to include the following character.
5314 // At the end of a line, an empty range is extended backward to include
5315 // the preceding character.
5316 buffer.read_with(cx, |buffer, _| {
5317 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5318 assert_eq!(
5319 chunks
5320 .iter()
5321 .map(|(s, d)| (s.as_str(), *d))
5322 .collect::<Vec<_>>(),
5323 &[
5324 ("let one = ", None),
5325 (";", Some(DiagnosticSeverity::ERROR)),
5326 ("\nlet two =", None),
5327 (" ", Some(DiagnosticSeverity::ERROR)),
5328 ("\nlet three = 3;\n", None)
5329 ]
5330 );
5331 });
5332 }
5333
5334 #[gpui::test]
5335 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5336 cx.foreground().forbid_parking();
5337
5338 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5339 let language = Arc::new(Language::new(
5340 LanguageConfig {
5341 name: "Rust".into(),
5342 path_suffixes: vec!["rs".to_string()],
5343 language_server: Some(lsp_config),
5344 ..Default::default()
5345 },
5346 Some(tree_sitter_rust::language()),
5347 ));
5348
5349 let text = "
5350 fn a() {
5351 f1();
5352 }
5353 fn b() {
5354 f2();
5355 }
5356 fn c() {
5357 f3();
5358 }
5359 "
5360 .unindent();
5361
5362 let fs = FakeFs::new(cx.background());
5363 fs.insert_tree(
5364 "/dir",
5365 json!({
5366 "a.rs": text.clone(),
5367 }),
5368 )
5369 .await;
5370
5371 let project = Project::test(fs, cx);
5372 project.update(cx, |project, _| project.languages.add(language));
5373
5374 let worktree_id = project
5375 .update(cx, |project, cx| {
5376 project.find_or_create_local_worktree("/dir", true, cx)
5377 })
5378 .await
5379 .unwrap()
5380 .0
5381 .read_with(cx, |tree, _| tree.id());
5382
5383 let buffer = project
5384 .update(cx, |project, cx| {
5385 project.open_buffer_for_path((worktree_id, "a.rs"), cx)
5386 })
5387 .await
5388 .unwrap();
5389
5390 let mut fake_server = fake_servers.next().await.unwrap();
5391 let lsp_document_version = fake_server
5392 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5393 .await
5394 .text_document
5395 .version;
5396
5397 // Simulate editing the buffer after the language server computes some edits.
5398 buffer.update(cx, |buffer, cx| {
5399 buffer.edit(
5400 [Point::new(0, 0)..Point::new(0, 0)],
5401 "// above first function\n",
5402 cx,
5403 );
5404 buffer.edit(
5405 [Point::new(2, 0)..Point::new(2, 0)],
5406 " // inside first function\n",
5407 cx,
5408 );
5409 buffer.edit(
5410 [Point::new(6, 4)..Point::new(6, 4)],
5411 "// inside second function ",
5412 cx,
5413 );
5414
5415 assert_eq!(
5416 buffer.text(),
5417 "
5418 // above first function
5419 fn a() {
5420 // inside first function
5421 f1();
5422 }
5423 fn b() {
5424 // inside second function f2();
5425 }
5426 fn c() {
5427 f3();
5428 }
5429 "
5430 .unindent()
5431 );
5432 });
5433
5434 let edits = project
5435 .update(cx, |project, cx| {
5436 project.edits_from_lsp(
5437 &buffer,
5438 vec![
5439 // replace body of first function
5440 lsp::TextEdit {
5441 range: lsp::Range::new(
5442 lsp::Position::new(0, 0),
5443 lsp::Position::new(3, 0),
5444 ),
5445 new_text: "
5446 fn a() {
5447 f10();
5448 }
5449 "
5450 .unindent(),
5451 },
5452 // edit inside second function
5453 lsp::TextEdit {
5454 range: lsp::Range::new(
5455 lsp::Position::new(4, 6),
5456 lsp::Position::new(4, 6),
5457 ),
5458 new_text: "00".into(),
5459 },
5460 // edit inside third function via two distinct edits
5461 lsp::TextEdit {
5462 range: lsp::Range::new(
5463 lsp::Position::new(7, 5),
5464 lsp::Position::new(7, 5),
5465 ),
5466 new_text: "4000".into(),
5467 },
5468 lsp::TextEdit {
5469 range: lsp::Range::new(
5470 lsp::Position::new(7, 5),
5471 lsp::Position::new(7, 6),
5472 ),
5473 new_text: "".into(),
5474 },
5475 ],
5476 Some(lsp_document_version),
5477 cx,
5478 )
5479 })
5480 .await
5481 .unwrap();
5482
5483 buffer.update(cx, |buffer, cx| {
5484 for (range, new_text) in edits {
5485 buffer.edit([range], new_text, cx);
5486 }
5487 assert_eq!(
5488 buffer.text(),
5489 "
5490 // above first function
5491 fn a() {
5492 // inside first function
5493 f10();
5494 }
5495 fn b() {
5496 // inside second function f200();
5497 }
5498 fn c() {
5499 f4000();
5500 }
5501 "
5502 .unindent()
5503 );
5504 });
5505 }
5506
5507 #[gpui::test]
5508 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5509 cx.foreground().forbid_parking();
5510
5511 let text = "
5512 use a::b;
5513 use a::c;
5514
5515 fn f() {
5516 b();
5517 c();
5518 }
5519 "
5520 .unindent();
5521
5522 let fs = FakeFs::new(cx.background());
5523 fs.insert_tree(
5524 "/dir",
5525 json!({
5526 "a.rs": text.clone(),
5527 }),
5528 )
5529 .await;
5530
5531 let project = Project::test(fs, cx);
5532 let worktree_id = project
5533 .update(cx, |project, cx| {
5534 project.find_or_create_local_worktree("/dir", true, cx)
5535 })
5536 .await
5537 .unwrap()
5538 .0
5539 .read_with(cx, |tree, _| tree.id());
5540
5541 let buffer = project
5542 .update(cx, |project, cx| {
5543 project.open_buffer_for_path((worktree_id, "a.rs"), cx)
5544 })
5545 .await
5546 .unwrap();
5547
5548 // Simulate the language server sending us a small edit in the form of a very large diff.
5549 // Rust-analyzer does this when performing a merge-imports code action.
5550 let edits = project
5551 .update(cx, |project, cx| {
5552 project.edits_from_lsp(
5553 &buffer,
5554 [
5555 // Replace the first use statement without editing the semicolon.
5556 lsp::TextEdit {
5557 range: lsp::Range::new(
5558 lsp::Position::new(0, 4),
5559 lsp::Position::new(0, 8),
5560 ),
5561 new_text: "a::{b, c}".into(),
5562 },
5563 // Reinsert the remainder of the file between the semicolon and the final
5564 // newline of the file.
5565 lsp::TextEdit {
5566 range: lsp::Range::new(
5567 lsp::Position::new(0, 9),
5568 lsp::Position::new(0, 9),
5569 ),
5570 new_text: "\n\n".into(),
5571 },
5572 lsp::TextEdit {
5573 range: lsp::Range::new(
5574 lsp::Position::new(0, 9),
5575 lsp::Position::new(0, 9),
5576 ),
5577 new_text: "
5578 fn f() {
5579 b();
5580 c();
5581 }"
5582 .unindent(),
5583 },
5584 // Delete everything after the first newline of the file.
5585 lsp::TextEdit {
5586 range: lsp::Range::new(
5587 lsp::Position::new(1, 0),
5588 lsp::Position::new(7, 0),
5589 ),
5590 new_text: "".into(),
5591 },
5592 ],
5593 None,
5594 cx,
5595 )
5596 })
5597 .await
5598 .unwrap();
5599
5600 buffer.update(cx, |buffer, cx| {
5601 let edits = edits
5602 .into_iter()
5603 .map(|(range, text)| {
5604 (
5605 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5606 text,
5607 )
5608 })
5609 .collect::<Vec<_>>();
5610
5611 assert_eq!(
5612 edits,
5613 [
5614 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5615 (Point::new(1, 0)..Point::new(2, 0), "".into())
5616 ]
5617 );
5618
5619 for (range, new_text) in edits {
5620 buffer.edit([range], new_text, cx);
5621 }
5622 assert_eq!(
5623 buffer.text(),
5624 "
5625 use a::{b, c};
5626
5627 fn f() {
5628 b();
5629 c();
5630 }
5631 "
5632 .unindent()
5633 );
5634 });
5635 }
5636
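    // Test helper: walks the buffer's chunks and coalesces adjacent chunks that
    // share the same diagnostic severity.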
5637 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5638 buffer: &Buffer,
5639 range: Range<T>,
5640 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5641 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5642 for chunk in buffer.snapshot().chunks(range, true) {
5643 if chunks.last().map_or(false, |prev_chunk| {
5644 prev_chunk.1 == chunk.diagnostic_severity
5645 }) {
5646 chunks.last_mut().unwrap().0.push_str(chunk.text);
5647 } else {
5648 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5649 }
5650 }
5651 chunks
5652 }
5653
5654 #[gpui::test]
5655 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5656 let dir = temp_tree(json!({
5657 "root": {
5658 "dir1": {},
5659 "dir2": {
5660 "dir3": {}
5661 }
5662 }
5663 }));
5664
5665 let project = Project::test(Arc::new(RealFs), cx);
5666 let (tree, _) = project
5667 .update(cx, |project, cx| {
5668 project.find_or_create_local_worktree(&dir.path(), true, cx)
5669 })
5670 .await
5671 .unwrap();
5672
5673 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5674 .await;
5675
5676 let cancel_flag = Default::default();
5677 let results = project
5678 .read_with(cx, |project, cx| {
5679 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5680 })
5681 .await;
5682
5683 assert!(results.is_empty());
5684 }
5685
5686 #[gpui::test]
5687 async fn test_definition(cx: &mut gpui::TestAppContext) {
5688 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5689 let language = Arc::new(Language::new(
5690 LanguageConfig {
5691 name: "Rust".into(),
5692 path_suffixes: vec!["rs".to_string()],
5693 language_server: Some(language_server_config),
5694 ..Default::default()
5695 },
5696 Some(tree_sitter_rust::language()),
5697 ));
5698
5699 let fs = FakeFs::new(cx.background());
5700 fs.insert_tree(
5701 "/dir",
5702 json!({
5703 "a.rs": "const fn a() { A }",
5704 "b.rs": "const y: i32 = crate::a()",
5705 }),
5706 )
5707 .await;
5708
5709 let project = Project::test(fs, cx);
5710 project.update(cx, |project, _| {
5711 Arc::get_mut(&mut project.languages).unwrap().add(language);
5712 });
5713
5714 let (tree, _) = project
5715 .update(cx, |project, cx| {
5716 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5717 })
5718 .await
5719 .unwrap();
5720 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5721 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5722 .await;
5723
5724 let buffer = project
5725 .update(cx, |project, cx| {
5726 project.open_buffer_for_path(
5727 ProjectPath {
5728 worktree_id,
5729 path: Path::new("").into(),
5730 },
5731 cx,
5732 )
5733 })
5734 .await
5735 .unwrap();
5736
5737 let mut fake_server = fake_servers.next().await.unwrap();
5738 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5739 let params = params.text_document_position_params;
5740 assert_eq!(
5741 params.text_document.uri.to_file_path().unwrap(),
5742 Path::new("/dir/b.rs"),
5743 );
5744 assert_eq!(params.position, lsp::Position::new(0, 22));
5745
5746 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5747 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5748 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5749 )))
5750 });
5751
5752 let mut definitions = project
5753 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5754 .await
5755 .unwrap();
5756
5757 assert_eq!(definitions.len(), 1);
5758 let definition = definitions.pop().unwrap();
5759 cx.update(|cx| {
5760 let target_buffer = definition.buffer.read(cx);
5761 assert_eq!(
5762 target_buffer
5763 .file()
5764 .unwrap()
5765 .as_local()
5766 .unwrap()
5767 .abs_path(cx),
5768 Path::new("/dir/a.rs"),
5769 );
5770 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5771 assert_eq!(
5772 list_worktrees(&project, cx),
5773 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5774 );
5775
5776 drop(definition);
5777 });
5778 cx.read(|cx| {
5779 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5780 });
5781
5782 fn list_worktrees<'a>(
5783 project: &'a ModelHandle<Project>,
5784 cx: &'a AppContext,
5785 ) -> Vec<(&'a Path, bool)> {
5786 project
5787 .read(cx)
5788 .worktrees(cx)
5789 .map(|worktree| {
5790 let worktree = worktree.read(cx);
5791 (
5792 worktree.as_local().unwrap().abs_path().as_ref(),
5793 worktree.is_visible(),
5794 )
5795 })
5796 .collect::<Vec<_>>()
5797 }
5798 }
5799
5800 #[gpui::test]
5801 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5802 let fs = FakeFs::new(cx.background());
5803 fs.insert_tree(
5804 "/dir",
5805 json!({
5806 "file1": "the old contents",
5807 }),
5808 )
5809 .await;
5810
5811 let project = Project::test(fs.clone(), cx);
5812 let worktree_id = project
5813 .update(cx, |p, cx| {
5814 p.find_or_create_local_worktree("/dir", true, cx)
5815 })
5816 .await
5817 .unwrap()
5818 .0
5819 .read_with(cx, |tree, _| tree.id());
5820
5821 let buffer = project
5822 .update(cx, |p, cx| {
5823 p.open_buffer_for_path((worktree_id, "file1"), cx)
5824 })
5825 .await
5826 .unwrap();
5827 buffer
5828 .update(cx, |buffer, cx| {
5829 assert_eq!(buffer.text(), "the old contents");
5830 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5831 buffer.save(cx)
5832 })
5833 .await
5834 .unwrap();
5835
5836 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5837 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5838 }
5839
5840 #[gpui::test]
5841 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5842 let fs = FakeFs::new(cx.background());
5843 fs.insert_tree(
5844 "/dir",
5845 json!({
5846 "file1": "the old contents",
5847 }),
5848 )
5849 .await;
5850
5851 let project = Project::test(fs.clone(), cx);
5852 let worktree_id = project
5853 .update(cx, |p, cx| {
5854 p.find_or_create_local_worktree("/dir/file1", true, cx)
5855 })
5856 .await
5857 .unwrap()
5858 .0
5859 .read_with(cx, |tree, _| tree.id());
5860
5861 let buffer = project
5862 .update(cx, |p, cx| p.open_buffer_for_path((worktree_id, ""), cx))
5863 .await
5864 .unwrap();
5865 buffer
5866 .update(cx, |buffer, cx| {
5867 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5868 buffer.save(cx)
5869 })
5870 .await
5871 .unwrap();
5872
5873 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5874 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5875 }
5876
5877 #[gpui::test]
5878 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5879 let fs = FakeFs::new(cx.background());
5880 fs.insert_tree("/dir", json!({})).await;
5881
5882 let project = Project::test(fs.clone(), cx);
5883 let (worktree, _) = project
5884 .update(cx, |project, cx| {
5885 project.find_or_create_local_worktree("/dir", true, cx)
5886 })
5887 .await
5888 .unwrap();
5889 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5890
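        // Create an untitled buffer and edit it so that it becomes dirty.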
5891 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5892 buffer.update(cx, |buffer, cx| {
5893 buffer.edit([0..0], "abc", cx);
5894 assert!(buffer.is_dirty());
5895 assert!(!buffer.has_conflict());
5896 });
5897 project
5898 .update(cx, |project, cx| {
5899 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5900 })
5901 .await
5902 .unwrap();
5903 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5904 buffer.read_with(cx, |buffer, cx| {
5905 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5906 assert!(!buffer.is_dirty());
5907 assert!(!buffer.has_conflict());
5908 });
5909
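        // Opening the path that the buffer was saved as should return the same buffer.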
5910 let opened_buffer = project
5911 .update(cx, |project, cx| {
5912 project.open_buffer_for_path((worktree_id, "file1"), cx)
5913 })
5914 .await
5915 .unwrap();
5916 assert_eq!(opened_buffer, buffer);
5917 }
5918
5919 #[gpui::test(retries = 5)]
5920 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5921 let dir = temp_tree(json!({
5922 "a": {
5923 "file1": "",
5924 "file2": "",
5925 "file3": "",
5926 },
5927 "b": {
5928 "c": {
5929 "file4": "",
5930 "file5": "",
5931 }
5932 }
5933 }));
5934
5935 let project = Project::test(Arc::new(RealFs), cx);
5936 let rpc = project.read_with(cx, |p, _| p.client.clone());
5937
5938 let (tree, _) = project
5939 .update(cx, |p, cx| {
5940 p.find_or_create_local_worktree(dir.path(), true, cx)
5941 })
5942 .await
5943 .unwrap();
5944 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5945
5946 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5947 let buffer =
5948 project.update(cx, |p, cx| p.open_buffer_for_path((worktree_id, path), cx));
5949 async move { buffer.await.unwrap() }
5950 };
5951 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5952 tree.read_with(cx, |tree, _| {
5953 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5955 .id
5956 })
5957 };
5958
5959 let buffer2 = buffer_for_path("a/file2", cx).await;
5960 let buffer3 = buffer_for_path("a/file3", cx).await;
5961 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5962 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5963
5964 let file2_id = id_for_path("a/file2", &cx);
5965 let file3_id = id_for_path("a/file3", &cx);
5966 let file4_id = id_for_path("b/c/file4", &cx);
5967
5968 // Wait for the initial scan.
5969 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5970 .await;
5971
5972 // Create a remote copy of this worktree.
5973 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5974 let (remote, load_task) = cx.update(|cx| {
5975 Worktree::remote(
5976 1,
5977 1,
5978 initial_snapshot.to_proto(&Default::default(), true),
5979 rpc.clone(),
5980 cx,
5981 )
5982 });
5983 load_task.await;
5984
5985 cx.read(|cx| {
5986 assert!(!buffer2.read(cx).is_dirty());
5987 assert!(!buffer3.read(cx).is_dirty());
5988 assert!(!buffer4.read(cx).is_dirty());
5989 assert!(!buffer5.read(cx).is_dirty());
5990 });
5991
5992 // Rename and delete files and directories.
5993 tree.flush_fs_events(&cx).await;
5994 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5995 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5996 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5997 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5998 tree.flush_fs_events(&cx).await;
5999
6000 let expected_paths = vec![
6001 "a",
6002 "a/file1",
6003 "a/file2.new",
6004 "b",
6005 "d",
6006 "d/file3",
6007 "d/file4",
6008 ];
6009
6010 cx.read(|app| {
6011 assert_eq!(
6012 tree.read(app)
6013 .paths()
6014 .map(|p| p.to_str().unwrap())
6015 .collect::<Vec<_>>(),
6016 expected_paths
6017 );
6018
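            // Entries keep their ids even after being renamed or moved.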
6019 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6020 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6021 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6022
6023 assert_eq!(
6024 buffer2.read(app).file().unwrap().path().as_ref(),
6025 Path::new("a/file2.new")
6026 );
6027 assert_eq!(
6028 buffer3.read(app).file().unwrap().path().as_ref(),
6029 Path::new("d/file3")
6030 );
6031 assert_eq!(
6032 buffer4.read(app).file().unwrap().path().as_ref(),
6033 Path::new("d/file4")
6034 );
6035 assert_eq!(
6036 buffer5.read(app).file().unwrap().path().as_ref(),
6037 Path::new("b/c/file5")
6038 );
6039
6040 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6041 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6042 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6043 assert!(buffer5.read(app).file().unwrap().is_deleted());
6044 });
6045
6046 // Update the remote worktree. Check that it becomes consistent with the
6047 // local worktree.
6048 remote.update(cx, |remote, cx| {
6049 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6050 &initial_snapshot,
6051 1,
6052 1,
6053 true,
6054 );
6055 remote
6056 .as_remote_mut()
6057 .unwrap()
6058 .snapshot
6059 .apply_remote_update(update_message)
6060 .unwrap();
6061
6062 assert_eq!(
6063 remote
6064 .paths()
6065 .map(|p| p.to_str().unwrap())
6066 .collect::<Vec<_>>(),
6067 expected_paths
6068 );
6069 });
6070 }
6071
6072 #[gpui::test]
6073 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6074 let fs = FakeFs::new(cx.background());
6075 fs.insert_tree(
6076 "/the-dir",
6077 json!({
6078 "a.txt": "a-contents",
6079 "b.txt": "b-contents",
6080 }),
6081 )
6082 .await;
6083
6084 let project = Project::test(fs.clone(), cx);
6085 let worktree_id = project
6086 .update(cx, |p, cx| {
6087 p.find_or_create_local_worktree("/the-dir", true, cx)
6088 })
6089 .await
6090 .unwrap()
6091 .0
6092 .read_with(cx, |tree, _| tree.id());
6093
6094 // Spawn multiple tasks to open paths, repeating some paths.
6095 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6096 (
6097 p.open_buffer_for_path((worktree_id, "a.txt"), cx),
6098 p.open_buffer_for_path((worktree_id, "b.txt"), cx),
6099 p.open_buffer_for_path((worktree_id, "a.txt"), cx),
6100 )
6101 });
6102
6103 let buffer_a_1 = buffer_a_1.await.unwrap();
6104 let buffer_a_2 = buffer_a_2.await.unwrap();
6105 let buffer_b = buffer_b.await.unwrap();
6106 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6107 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6108
6109 // There is only one buffer per path.
6110 let buffer_a_id = buffer_a_1.id();
6111 assert_eq!(buffer_a_2.id(), buffer_a_id);
6112
6113 // Open the same path again while it is still open.
6114 drop(buffer_a_1);
6115 let buffer_a_3 = project
6116 .update(cx, |p, cx| {
6117 p.open_buffer_for_path((worktree_id, "a.txt"), cx)
6118 })
6119 .await
6120 .unwrap();
6121
6122 // There's still only one buffer per path.
6123 assert_eq!(buffer_a_3.id(), buffer_a_id);
6124 }
6125
6126 #[gpui::test]
6127 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6128 use std::fs;
6129
6130 let dir = temp_tree(json!({
6131 "file1": "abc",
6132 "file2": "def",
6133 "file3": "ghi",
6134 }));
6135
6136 let project = Project::test(Arc::new(RealFs), cx);
6137 let (worktree, _) = project
6138 .update(cx, |p, cx| {
6139 p.find_or_create_local_worktree(dir.path(), true, cx)
6140 })
6141 .await
6142 .unwrap();
6143 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6144
6145 worktree.flush_fs_events(&cx).await;
6146 worktree
6147 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6148 .await;
6149
6150 let buffer1 = project
6151 .update(cx, |p, cx| {
6152 p.open_buffer_for_path((worktree_id, "file1"), cx)
6153 })
6154 .await
6155 .unwrap();
6156 let events = Rc::new(RefCell::new(Vec::new()));
6157
6158 // initially, the buffer isn't dirty.
6159 buffer1.update(cx, |buffer, cx| {
6160 cx.subscribe(&buffer1, {
6161 let events = events.clone();
6162 move |_, _, event, _| match event {
6163 BufferEvent::Operation(_) => {}
6164 _ => events.borrow_mut().push(event.clone()),
6165 }
6166 })
6167 .detach();
6168
6169 assert!(!buffer.is_dirty());
6170 assert!(events.borrow().is_empty());
6171
6172 buffer.edit(vec![1..2], "", cx);
6173 });
6174
6175 // after the first edit, the buffer is dirty, and emits a dirtied event.
6176 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
6178 assert!(buffer.is_dirty());
6179 assert_eq!(
6180 *events.borrow(),
6181 &[language::Event::Edited, language::Event::Dirtied]
6182 );
6183 events.borrow_mut().clear();
6184 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6185 });
6186
6187 // after saving, the buffer is not dirty, and emits a saved event.
6188 buffer1.update(cx, |buffer, cx| {
6189 assert!(!buffer.is_dirty());
6190 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6191 events.borrow_mut().clear();
6192
6193 buffer.edit(vec![1..1], "B", cx);
6194 buffer.edit(vec![2..2], "D", cx);
6195 });
6196
6197 // after editing again, the buffer is dirty, and emits another dirty event.
6198 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
6200 assert!(buffer.is_dirty());
6201 assert_eq!(
6202 *events.borrow(),
6203 &[
6204 language::Event::Edited,
6205 language::Event::Dirtied,
6206 language::Event::Edited,
6207 ],
6208 );
6209 events.borrow_mut().clear();
6210
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
6213 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
6215 assert!(buffer.is_dirty());
6216 });
6217
6218 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6219
6220 // When a file is deleted, the buffer is considered dirty.
6221 let events = Rc::new(RefCell::new(Vec::new()));
6222 let buffer2 = project
6223 .update(cx, |p, cx| {
6224 p.open_buffer_for_path((worktree_id, "file2"), cx)
6225 })
6226 .await
6227 .unwrap();
6228 buffer2.update(cx, |_, cx| {
6229 cx.subscribe(&buffer2, {
6230 let events = events.clone();
6231 move |_, _, event, _| events.borrow_mut().push(event.clone())
6232 })
6233 .detach();
6234 });
6235
6236 fs::remove_file(dir.path().join("file2")).unwrap();
6237 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6238 assert_eq!(
6239 *events.borrow(),
6240 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6241 );
6242
6243 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6244 let events = Rc::new(RefCell::new(Vec::new()));
6245 let buffer3 = project
6246 .update(cx, |p, cx| {
6247 p.open_buffer_for_path((worktree_id, "file3"), cx)
6248 })
6249 .await
6250 .unwrap();
6251 buffer3.update(cx, |_, cx| {
6252 cx.subscribe(&buffer3, {
6253 let events = events.clone();
6254 move |_, _, event, _| events.borrow_mut().push(event.clone())
6255 })
6256 .detach();
6257 });
6258
6259 worktree.flush_fs_events(&cx).await;
6260 buffer3.update(cx, |buffer, cx| {
6261 buffer.edit(Some(0..0), "x", cx);
6262 });
6263 events.borrow_mut().clear();
6264 fs::remove_file(dir.path().join("file3")).unwrap();
6265 buffer3
6266 .condition(&cx, |_, _| !events.borrow().is_empty())
6267 .await;
6268 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6269 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6270 }
6271
6272 #[gpui::test]
6273 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6274 use std::fs;
6275
6276 let initial_contents = "aaa\nbbbbb\nc\n";
6277 let dir = temp_tree(json!({ "the-file": initial_contents }));
6278
6279 let project = Project::test(Arc::new(RealFs), cx);
6280 let (worktree, _) = project
6281 .update(cx, |p, cx| {
6282 p.find_or_create_local_worktree(dir.path(), true, cx)
6283 })
6284 .await
6285 .unwrap();
6286 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6287
6288 worktree
6289 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6290 .await;
6291
6292 let abs_path = dir.path().join("the-file");
6293 let buffer = project
6294 .update(cx, |p, cx| {
6295 p.open_buffer_for_path((worktree_id, "the-file"), cx)
6296 })
6297 .await
6298 .unwrap();
6299
6300 // TODO
6301 // Add a cursor on each row.
6302 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6303 // assert!(!buffer.is_dirty());
6304 // buffer.add_selection_set(
6305 // &(0..3)
6306 // .map(|row| Selection {
6307 // id: row as usize,
6308 // start: Point::new(row, 1),
6309 // end: Point::new(row, 1),
6310 // reversed: false,
6311 // goal: SelectionGoal::None,
6312 // })
6313 // .collect::<Vec<_>>(),
6314 // cx,
6315 // )
6316 // });
6317
6318 // Change the file on disk, adding two new lines of text, and removing
6319 // one line.
6320 buffer.read_with(cx, |buffer, _| {
6321 assert!(!buffer.is_dirty());
6322 assert!(!buffer.has_conflict());
6323 });
6324 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6325 fs::write(&abs_path, new_contents).unwrap();
6326
6327 // Because the buffer was not modified, it is reloaded from disk. Its
6328 // contents are edited according to the diff between the old and new
6329 // file contents.
6330 buffer
6331 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6332 .await;
6333
6334 buffer.update(cx, |buffer, _| {
6335 assert_eq!(buffer.text(), new_contents);
6336 assert!(!buffer.is_dirty());
6337 assert!(!buffer.has_conflict());
6338
6339 // TODO
6340 // let cursor_positions = buffer
6341 // .selection_set(selection_set_id)
6342 // .unwrap()
6343 // .selections::<Point>(&*buffer)
6344 // .map(|selection| {
6345 // assert_eq!(selection.start, selection.end);
6346 // selection.start
6347 // })
6348 // .collect::<Vec<_>>();
6349 // assert_eq!(
6350 // cursor_positions,
6351 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6352 // );
6353 });
6354
        // Modify the buffer.
6356 buffer.update(cx, |buffer, cx| {
6357 buffer.edit(vec![0..0], " ", cx);
6358 assert!(buffer.is_dirty());
6359 assert!(!buffer.has_conflict());
6360 });
6361
6362 // Change the file on disk again, adding blank lines to the beginning.
6363 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6364
6365 // Because the buffer is modified, it doesn't reload from disk, but is
6366 // marked as having a conflict.
6367 buffer
6368 .condition(&cx, |buffer, _| buffer.has_conflict())
6369 .await;
6370 }
6371
6372 #[gpui::test]
6373 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6374 cx.foreground().forbid_parking();
6375
6376 let fs = FakeFs::new(cx.background());
6377 fs.insert_tree(
6378 "/the-dir",
6379 json!({
6380 "a.rs": "
6381 fn foo(mut v: Vec<usize>) {
6382 for x in &v {
6383 v.push(1);
6384 }
6385 }
6386 "
6387 .unindent(),
6388 }),
6389 )
6390 .await;
6391
6392 let project = Project::test(fs.clone(), cx);
6393 let (worktree, _) = project
6394 .update(cx, |p, cx| {
6395 p.find_or_create_local_worktree("/the-dir", true, cx)
6396 })
6397 .await
6398 .unwrap();
6399 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6400
6401 let buffer = project
6402 .update(cx, |p, cx| {
6403 p.open_buffer_for_path((worktree_id, "a.rs"), cx)
6404 })
6405 .await
6406 .unwrap();
6407
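        // Construct diagnostics in which the hints and their primary diagnostics
        // reference each other via relatedInformation.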
6408 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6409 let message = lsp::PublishDiagnosticsParams {
6410 uri: buffer_uri.clone(),
6411 diagnostics: vec![
6412 lsp::Diagnostic {
6413 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6414 severity: Some(DiagnosticSeverity::WARNING),
6415 message: "error 1".to_string(),
6416 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6417 location: lsp::Location {
6418 uri: buffer_uri.clone(),
6419 range: lsp::Range::new(
6420 lsp::Position::new(1, 8),
6421 lsp::Position::new(1, 9),
6422 ),
6423 },
6424 message: "error 1 hint 1".to_string(),
6425 }]),
6426 ..Default::default()
6427 },
6428 lsp::Diagnostic {
6429 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6430 severity: Some(DiagnosticSeverity::HINT),
6431 message: "error 1 hint 1".to_string(),
6432 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6433 location: lsp::Location {
6434 uri: buffer_uri.clone(),
6435 range: lsp::Range::new(
6436 lsp::Position::new(1, 8),
6437 lsp::Position::new(1, 9),
6438 ),
6439 },
6440 message: "original diagnostic".to_string(),
6441 }]),
6442 ..Default::default()
6443 },
6444 lsp::Diagnostic {
6445 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6446 severity: Some(DiagnosticSeverity::ERROR),
6447 message: "error 2".to_string(),
6448 related_information: Some(vec![
6449 lsp::DiagnosticRelatedInformation {
6450 location: lsp::Location {
6451 uri: buffer_uri.clone(),
6452 range: lsp::Range::new(
6453 lsp::Position::new(1, 13),
6454 lsp::Position::new(1, 15),
6455 ),
6456 },
6457 message: "error 2 hint 1".to_string(),
6458 },
6459 lsp::DiagnosticRelatedInformation {
6460 location: lsp::Location {
6461 uri: buffer_uri.clone(),
6462 range: lsp::Range::new(
6463 lsp::Position::new(1, 13),
6464 lsp::Position::new(1, 15),
6465 ),
6466 },
6467 message: "error 2 hint 2".to_string(),
6468 },
6469 ]),
6470 ..Default::default()
6471 },
6472 lsp::Diagnostic {
6473 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6474 severity: Some(DiagnosticSeverity::HINT),
6475 message: "error 2 hint 1".to_string(),
6476 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6477 location: lsp::Location {
6478 uri: buffer_uri.clone(),
6479 range: lsp::Range::new(
6480 lsp::Position::new(2, 8),
6481 lsp::Position::new(2, 17),
6482 ),
6483 },
6484 message: "original diagnostic".to_string(),
6485 }]),
6486 ..Default::default()
6487 },
6488 lsp::Diagnostic {
6489 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6490 severity: Some(DiagnosticSeverity::HINT),
6491 message: "error 2 hint 2".to_string(),
6492 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6493 location: lsp::Location {
6494 uri: buffer_uri.clone(),
6495 range: lsp::Range::new(
6496 lsp::Position::new(2, 8),
6497 lsp::Position::new(2, 17),
6498 ),
6499 },
6500 message: "original diagnostic".to_string(),
6501 }]),
6502 ..Default::default()
6503 },
6504 ],
6505 version: None,
6506 };
6507
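        // Processing the diagnostics groups each primary diagnostic together with its
        // related hints under a shared group id.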
6508 project
6509 .update(cx, |p, cx| {
6510 p.update_diagnostics(message, &Default::default(), cx)
6511 })
6512 .unwrap();
6513 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6514
6515 assert_eq!(
6516 buffer
6517 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6518 .collect::<Vec<_>>(),
6519 &[
6520 DiagnosticEntry {
6521 range: Point::new(1, 8)..Point::new(1, 9),
6522 diagnostic: Diagnostic {
6523 severity: DiagnosticSeverity::WARNING,
6524 message: "error 1".to_string(),
6525 group_id: 0,
6526 is_primary: true,
6527 ..Default::default()
6528 }
6529 },
6530 DiagnosticEntry {
6531 range: Point::new(1, 8)..Point::new(1, 9),
6532 diagnostic: Diagnostic {
6533 severity: DiagnosticSeverity::HINT,
6534 message: "error 1 hint 1".to_string(),
6535 group_id: 0,
6536 is_primary: false,
6537 ..Default::default()
6538 }
6539 },
6540 DiagnosticEntry {
6541 range: Point::new(1, 13)..Point::new(1, 15),
6542 diagnostic: Diagnostic {
6543 severity: DiagnosticSeverity::HINT,
6544 message: "error 2 hint 1".to_string(),
6545 group_id: 1,
6546 is_primary: false,
6547 ..Default::default()
6548 }
6549 },
6550 DiagnosticEntry {
6551 range: Point::new(1, 13)..Point::new(1, 15),
6552 diagnostic: Diagnostic {
6553 severity: DiagnosticSeverity::HINT,
6554 message: "error 2 hint 2".to_string(),
6555 group_id: 1,
6556 is_primary: false,
6557 ..Default::default()
6558 }
6559 },
6560 DiagnosticEntry {
6561 range: Point::new(2, 8)..Point::new(2, 17),
6562 diagnostic: Diagnostic {
6563 severity: DiagnosticSeverity::ERROR,
6564 message: "error 2".to_string(),
6565 group_id: 1,
6566 is_primary: true,
6567 ..Default::default()
6568 }
6569 }
6570 ]
6571 );
6572
6573 assert_eq!(
6574 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6575 &[
6576 DiagnosticEntry {
6577 range: Point::new(1, 8)..Point::new(1, 9),
6578 diagnostic: Diagnostic {
6579 severity: DiagnosticSeverity::WARNING,
6580 message: "error 1".to_string(),
6581 group_id: 0,
6582 is_primary: true,
6583 ..Default::default()
6584 }
6585 },
6586 DiagnosticEntry {
6587 range: Point::new(1, 8)..Point::new(1, 9),
6588 diagnostic: Diagnostic {
6589 severity: DiagnosticSeverity::HINT,
6590 message: "error 1 hint 1".to_string(),
6591 group_id: 0,
6592 is_primary: false,
6593 ..Default::default()
6594 }
6595 },
6596 ]
6597 );
6598 assert_eq!(
6599 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6600 &[
6601 DiagnosticEntry {
6602 range: Point::new(1, 13)..Point::new(1, 15),
6603 diagnostic: Diagnostic {
6604 severity: DiagnosticSeverity::HINT,
6605 message: "error 2 hint 1".to_string(),
6606 group_id: 1,
6607 is_primary: false,
6608 ..Default::default()
6609 }
6610 },
6611 DiagnosticEntry {
6612 range: Point::new(1, 13)..Point::new(1, 15),
6613 diagnostic: Diagnostic {
6614 severity: DiagnosticSeverity::HINT,
6615 message: "error 2 hint 2".to_string(),
6616 group_id: 1,
6617 is_primary: false,
6618 ..Default::default()
6619 }
6620 },
6621 DiagnosticEntry {
6622 range: Point::new(2, 8)..Point::new(2, 17),
6623 diagnostic: Diagnostic {
6624 severity: DiagnosticSeverity::ERROR,
6625 message: "error 2".to_string(),
6626 group_id: 1,
6627 is_primary: true,
6628 ..Default::default()
6629 }
6630 }
6631 ]
6632 );
6633 }
6634
6635 #[gpui::test]
6636 async fn test_rename(cx: &mut gpui::TestAppContext) {
6637 cx.foreground().forbid_parking();
6638
6639 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6640 let language = Arc::new(Language::new(
6641 LanguageConfig {
6642 name: "Rust".into(),
6643 path_suffixes: vec!["rs".to_string()],
6644 language_server: Some(language_server_config),
6645 ..Default::default()
6646 },
6647 Some(tree_sitter_rust::language()),
6648 ));
6649
6650 let fs = FakeFs::new(cx.background());
6651 fs.insert_tree(
6652 "/dir",
6653 json!({
6654 "one.rs": "const ONE: usize = 1;",
6655 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6656 }),
6657 )
6658 .await;
6659
6660 let project = Project::test(fs.clone(), cx);
6661 project.update(cx, |project, _| {
6662 Arc::get_mut(&mut project.languages).unwrap().add(language);
6663 });
6664
6665 let (tree, _) = project
6666 .update(cx, |project, cx| {
6667 project.find_or_create_local_worktree("/dir", true, cx)
6668 })
6669 .await
6670 .unwrap();
6671 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6672 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6673 .await;
6674
6675 let buffer = project
6676 .update(cx, |project, cx| {
6677 project.open_buffer_for_path((worktree_id, Path::new("one.rs")), cx)
6678 })
6679 .await
6680 .unwrap();
6681
6682 let mut fake_server = fake_servers.next().await.unwrap();
6683
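        // Prepare a rename at the location of `ONE` in one.rs. The fake server
        // responds with the range of the symbol to be renamed.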
6684 let response = project.update(cx, |project, cx| {
6685 project.prepare_rename(buffer.clone(), 7, cx)
6686 });
6687 fake_server
6688 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6689 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6690 assert_eq!(params.position, lsp::Position::new(0, 7));
6691 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6692 lsp::Position::new(0, 6),
6693 lsp::Position::new(0, 9),
6694 )))
6695 })
6696 .next()
6697 .await
6698 .unwrap();
6699 let range = response.await.unwrap().unwrap();
6700 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6701 assert_eq!(range, 6..9);
6702
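        // Perform the rename. The fake server's workspace edit touches both one.rs and
        // two.rs, so the resulting transaction should contain two buffers.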
6703 let response = project.update(cx, |project, cx| {
6704 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6705 });
6706 fake_server
6707 .handle_request::<lsp::request::Rename, _>(|params, _| {
6708 assert_eq!(
6709 params.text_document_position.text_document.uri.as_str(),
6710 "file:///dir/one.rs"
6711 );
6712 assert_eq!(
6713 params.text_document_position.position,
6714 lsp::Position::new(0, 7)
6715 );
6716 assert_eq!(params.new_name, "THREE");
6717 Some(lsp::WorkspaceEdit {
6718 changes: Some(
6719 [
6720 (
6721 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6722 vec![lsp::TextEdit::new(
6723 lsp::Range::new(
6724 lsp::Position::new(0, 6),
6725 lsp::Position::new(0, 9),
6726 ),
6727 "THREE".to_string(),
6728 )],
6729 ),
6730 (
6731 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6732 vec![
6733 lsp::TextEdit::new(
6734 lsp::Range::new(
6735 lsp::Position::new(0, 24),
6736 lsp::Position::new(0, 27),
6737 ),
6738 "THREE".to_string(),
6739 ),
6740 lsp::TextEdit::new(
6741 lsp::Range::new(
6742 lsp::Position::new(0, 35),
6743 lsp::Position::new(0, 38),
6744 ),
6745 "THREE".to_string(),
6746 ),
6747 ],
6748 ),
6749 ]
6750 .into_iter()
6751 .collect(),
6752 ),
6753 ..Default::default()
6754 })
6755 })
6756 .next()
6757 .await
6758 .unwrap();
6759 let mut transaction = response.await.unwrap().0;
6760 assert_eq!(transaction.len(), 2);
6761 assert_eq!(
6762 transaction
6763 .remove_entry(&buffer)
6764 .unwrap()
6765 .0
6766 .read_with(cx, |buffer, _| buffer.text()),
6767 "const THREE: usize = 1;"
6768 );
6769 assert_eq!(
6770 transaction
6771 .into_keys()
6772 .next()
6773 .unwrap()
6774 .read_with(cx, |buffer, _| buffer.text()),
6775 "const TWO: usize = one::THREE + one::THREE;"
6776 );
6777 }
6778
6779 #[gpui::test]
6780 async fn test_search(cx: &mut gpui::TestAppContext) {
6781 let fs = FakeFs::new(cx.background());
6782 fs.insert_tree(
6783 "/dir",
6784 json!({
6785 "one.rs": "const ONE: usize = 1;",
6786 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6787 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6788 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6789 }),
6790 )
6791 .await;
6792 let project = Project::test(fs.clone(), cx);
6793 let (tree, _) = project
6794 .update(cx, |project, cx| {
6795 project.find_or_create_local_worktree("/dir", true, cx)
6796 })
6797 .await
6798 .unwrap();
6799 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6800 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6801 .await;
6802
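        // Searching the project finds the occurrences of "TWO" in the files on disk.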
6803 assert_eq!(
6804 search(&project, SearchQuery::text("TWO", false, true), cx)
6805 .await
6806 .unwrap(),
6807 HashMap::from_iter([
6808 ("two.rs".to_string(), vec![6..9]),
6809 ("three.rs".to_string(), vec![37..40])
6810 ])
6811 );
6812
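        // Edit an open buffer without saving it. Subsequent searches should reflect
        // the buffer's unsaved contents.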
6813 let buffer_4 = project
6814 .update(cx, |project, cx| {
6815 project.open_buffer_for_path((worktree_id, "four.rs"), cx)
6816 })
6817 .await
6818 .unwrap();
6819 buffer_4.update(cx, |buffer, cx| {
6820 buffer.edit([20..28, 31..43], "two::TWO", cx);
6821 });
6822
6823 assert_eq!(
6824 search(&project, SearchQuery::text("TWO", false, true), cx)
6825 .await
6826 .unwrap(),
6827 HashMap::from_iter([
6828 ("two.rs".to_string(), vec![6..9]),
6829 ("three.rs".to_string(), vec![37..40]),
6830 ("four.rs".to_string(), vec![25..28, 36..39])
6831 ])
6832 );
6833
6834 async fn search(
6835 project: &ModelHandle<Project>,
6836 query: SearchQuery,
6837 cx: &mut gpui::TestAppContext,
6838 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6839 let results = project
6840 .update(cx, |project, cx| project.search(query, cx))
6841 .await?;
6842
6843 Ok(results
6844 .into_iter()
6845 .map(|(buffer, ranges)| {
6846 buffer.read_with(cx, |buffer, _| {
6847 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6848 let ranges = ranges
6849 .into_iter()
6850 .map(|range| range.to_offset(buffer))
6851 .collect::<Vec<_>>();
6852 (path, ranges)
6853 })
6854 })
6855 .collect())
6856 }
6857 }
6858}