1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use parking_lot::Mutex;
27use postage::watch;
28use rand::prelude::*;
29use search::SearchQuery;
30use sha2::{Digest, Sha256};
31use similar::{ChangeTag, TextDiff};
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{
42 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
43 Arc,
44 },
45 time::Instant,
46};
47use util::{post_inc, ResultExt, TryFutureExt as _};
48
49pub use fs::*;
50pub use worktree::*;
51
/// An entity that can be associated with an entry in a project's worktree
/// (e.g. an item backed by a file on disk).
pub trait Item: Entity {
    /// Returns the id of the project entry backing this item, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
55
/// The central workspace model: a set of worktrees plus all the state needed
/// to edit and collaborate on them — open buffers, language servers,
/// collaborators, and the client connection.
pub struct Project {
    // Handles to all worktrees in this project; may be weak (see `WorktreeHandle`).
    worktrees: Vec<WorktreeHandle>,
    // The entry most recently focused by the user, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // Running language servers, keyed by worktree and language name.
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    // In-flight language-server startup tasks, keyed like `language_servers`.
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    // Status (name, pending work, pending diagnostics) per language server id.
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    // Counter used to mint unique `ProjectEntryId`s.
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is hosted locally or joined remotely.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Count of language servers currently producing disk-based diagnostics.
    language_servers_with_diagnostics_running: isize,
    // Watch channel signalled whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids shared with each remote peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Buffers currently being loaded, so concurrent opens of the same path
    // can await a single load instead of racing.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // Local worktrees currently being created, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // All open buffers, keyed by their remote id (see `OpenBuffer`).
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Versioned text snapshots per buffer, used for LSP synchronization.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random value distinguishing this project instance.
    nonce: u128,
}
85
/// The project's handle on an open buffer. Strong handles keep the buffer
/// alive (used while shared or remote); weak handles let it drop when no one
/// else holds it; `Loading` queues operations that arrive before the buffer
/// finishes opening.
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}
91
/// A strong or weak handle to a worktree. Weak handles allow a worktree to be
/// released when the project no longer needs to keep it alive.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
96
/// Whether this project is hosted on this machine (`Local`) or joined from
/// another peer (`Remote`), plus the per-mode connection state.
enum ProjectClientState {
    Local {
        // True once the project has been shared with collaborators.
        is_shared: bool,
        // Server-assigned project id, updated as the connection changes.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Background task that keeps the remote id in sync with the
        // connection status; dropped with the project.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set when the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Background task that flips `sharing_has_stopped` on disconnect.
        _detect_unshare_task: Task<Option<()>>,
    },
}
111
/// Another user collaborating on this project, identified by their peer
/// connection and assigned a replica id for CRDT operations.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
118
/// Events emitted by a [`Project`] for observers (e.g. the workspace UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    // Lifecycle of a disk-based diagnostics pass (e.g. `cargo check`).
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
}
129
/// Internal events translated from language-server notifications: `$/progress`
/// work-done reports (keyed by their progress token) and published diagnostics.
enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}
143
/// User-visible status of a running language server: its name and any
/// in-progress work, keyed by progress token.
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    // Count of diagnostic updates announced but not yet received.
    pending_diagnostic_updates: isize,
}
149
/// A single in-progress unit of language-server work, as reported via LSP
/// progress notifications.
#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // When this progress entry was last updated (used to age out stale work).
    pub last_update_at: Instant,
}
156
/// A path to a file or directory within the project, expressed as a
/// worktree-relative path plus the id of the worktree it belongs to.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
162
/// Counts of primary diagnostics for a file, bucketed by severity.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
170
/// A range within a specific buffer, e.g. a definition or reference target.
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
176
/// A highlighted range produced by an LSP `documentHighlight` request, with
/// its kind (text, read access, or write access).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
182
/// A workspace symbol returned from a project-wide symbol search.
#[derive(Clone, Debug)]
pub struct Symbol {
    // The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    // The worktree that actually contains the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // Signature used to validate the symbol when resolving it later
    // (presumably a SHA-256 digest, given the `sha2` import — verify).
    pub signature: [u8; 32],
}
195
/// A set of buffer transactions produced by one logical project-wide
/// operation (e.g. a rename or code action), keyed by buffer.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
198
199impl DiagnosticSummary {
200 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
201 let mut this = Self {
202 error_count: 0,
203 warning_count: 0,
204 info_count: 0,
205 hint_count: 0,
206 };
207
208 for entry in diagnostics {
209 if entry.diagnostic.is_primary {
210 match entry.diagnostic.severity {
211 DiagnosticSeverity::ERROR => this.error_count += 1,
212 DiagnosticSeverity::WARNING => this.warning_count += 1,
213 DiagnosticSeverity::INFORMATION => this.info_count += 1,
214 DiagnosticSeverity::HINT => this.hint_count += 1,
215 _ => {}
216 }
217 }
218 }
219
220 this
221 }
222
223 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
224 proto::DiagnosticSummary {
225 path: path.to_string_lossy().to_string(),
226 error_count: self.error_count as u32,
227 warning_count: self.warning_count as u32,
228 info_count: self.info_count as u32,
229 hint_count: self.hint_count as u32,
230 }
231 }
232}
233
/// A unique identifier for a file-system entry (file or directory) within a
/// project, stable across worktrees.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);
236
237impl ProjectEntryId {
238 pub fn new(counter: &AtomicUsize) -> Self {
239 Self(counter.fetch_add(1, SeqCst))
240 }
241
242 pub fn from_proto(id: u64) -> Self {
243 Self(id as usize)
244 }
245
246 pub fn to_proto(&self) -> u64 {
247 self.0 as u64
248 }
249
250 pub fn to_usize(&self) -> usize {
251 self.0
252 }
253}
254
255impl Project {
    /// Registers all RPC message and request handlers that route incoming
    /// collaboration traffic to `Project` models. Must be called once at
    /// startup before any project is shared or joined.
    pub fn init(client: &Arc<Client>) {
        // One-way messages delivered to the project model.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers that produce a reply for the caller.
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        // Generic LSP commands share a single handler, parameterized by type.
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer_by_path);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
286
    /// Creates a project hosted on this machine.
    ///
    /// Spawns a background task that watches the client's connection status:
    /// on connect it registers the project (and all of its worktrees) with
    /// the server and records the assigned remote id; on disconnect it clears
    /// the remote id.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        // React to every connection status change for as long
                        // as the project model is still alive.
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Re-register every worktree under the
                                    // newly assigned project id, then await the
                                    // registrations outside the update closure.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                // Random nonce distinguishing this project instance.
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
369
    /// Joins a project hosted by another peer.
    ///
    /// Connects and authenticates, sends a `JoinProject` request, then builds
    /// a remote `Project` from the response: its worktrees, language-server
    /// statuses, and collaborators. Also spawns a task that marks the project
    /// read-only if the connection is ever lost.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The host assigns this guest a replica id for buffer operations.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            // Let the worktree contents load in the background.
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                // Route incoming messages for this remote entity to the model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                // Seed server statuses from the host's snapshot.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch user records for all collaborators before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
481
    /// Builds a local project suitable for tests, backed by a fake HTTP
    /// client that answers every request with a 404.
    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }
490
491 #[cfg(any(test, feature = "test-support"))]
492 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
493 self.opened_buffers
494 .get(&remote_id)
495 .and_then(|buffer| buffer.upgrade(cx))
496 }
497
    /// Returns the project's language registry.
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
502
    /// Test-only sanity checks: local projects must not contain two worktrees
    /// with the same absolute path, and remote projects must not have buffers
    /// with operations still deferred (i.e. waiting on missing dependencies).
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
536
537 #[cfg(any(test, feature = "test-support"))]
538 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
539 let path = path.into();
540 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
541 self.opened_buffers.iter().any(|(_, buffer)| {
542 if let Some(buffer) = buffer.upgrade(cx) {
543 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
544 if file.worktree == worktree && file.path() == &path.path {
545 return true;
546 }
547 }
548 }
549 false
550 })
551 } else {
552 false
553 }
554 }
555
    /// Returns the file-system implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
559
    /// Records the server-assigned project id (or clears it on disconnect),
    /// re-subscribes the model to RPC traffic under the new id, and notifies
    /// observers via `Event::RemoteIdChanged`.
    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        // Drop any subscription tied to a previous remote id before
        // registering under the new one.
        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
        cx.emit(Event::RemoteIdChanged(remote_id))
    }
572
573 pub fn remote_id(&self) -> Option<u64> {
574 match &self.client_state {
575 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
576 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
577 }
578 }
579
    /// Returns a future resolving to the project's remote id, waiting for one
    /// to be assigned if necessary. For remote projects the id is already
    /// known; for local projects this awaits the remote-id watch channel.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            // Local project: poll the watch until an id is published.
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
602
603 pub fn replica_id(&self) -> ReplicaId {
604 match &self.client_state {
605 ProjectClientState::Local { .. } => 0,
606 ProjectClientState::Remote { replica_id, .. } => *replica_id,
607 }
608 }
609
    /// Returns all collaborators currently in the project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
613
    /// Iterates over all worktrees that are still alive, silently skipping
    /// weak handles whose worktree has been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
622
623 pub fn visible_worktrees<'a>(
624 &'a self,
625 cx: &'a AppContext,
626 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
627 self.worktrees.iter().filter_map(|worktree| {
628 worktree.upgrade(cx).and_then(|worktree| {
629 if worktree.read(cx).is_visible() {
630 Some(worktree)
631 } else {
632 None
633 }
634 })
635 })
636 }
637
638 pub fn worktree_for_id(
639 &self,
640 id: WorktreeId,
641 cx: &AppContext,
642 ) -> Option<ModelHandle<Worktree>> {
643 self.worktrees(cx)
644 .find(|worktree| worktree.read(cx).id() == id)
645 }
646
647 pub fn worktree_for_entry(
648 &self,
649 entry_id: ProjectEntryId,
650 cx: &AppContext,
651 ) -> Option<ModelHandle<Worktree>> {
652 self.worktrees(cx)
653 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
654 }
655
656 pub fn worktree_id_for_entry(
657 &self,
658 entry_id: ProjectEntryId,
659 cx: &AppContext,
660 ) -> Option<WorktreeId> {
661 self.worktree_for_entry(entry_id, cx)
662 .map(|worktree| worktree.read(cx).id())
663 }
664
    /// Shares this local project with collaborators.
    ///
    /// Marks the project shared, upgrades all open-buffer and worktree
    /// handles to strong ones (so they stay alive for guests), then sends a
    /// `ShareProject` request and shares each worktree under the project id.
    /// Fails if the project has no remote id yet or is itself remote.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Pin open buffers with strong handles while shared.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // Buffers are only `Loading` on remote projects,
                            // and a remote project cannot be shared.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise pin all live worktrees.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share every worktree's contents with the server.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
726
727 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
728 let rpc = self.client.clone();
729 cx.spawn(|this, mut cx| async move {
730 let project_id = this.update(&mut cx, |this, cx| {
731 if let ProjectClientState::Local {
732 is_shared,
733 remote_id_rx,
734 ..
735 } = &mut this.client_state
736 {
737 *is_shared = false;
738
739 for open_buffer in this.opened_buffers.values_mut() {
740 match open_buffer {
741 OpenBuffer::Strong(buffer) => {
742 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
743 }
744 _ => {}
745 }
746 }
747
748 for worktree_handle in this.worktrees.iter_mut() {
749 match worktree_handle {
750 WorktreeHandle::Strong(worktree) => {
751 if !worktree.read(cx).is_visible() {
752 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
753 }
754 }
755 _ => {}
756 }
757 }
758
759 remote_id_rx
760 .borrow()
761 .ok_or_else(|| anyhow!("no project id"))
762 } else {
763 Err(anyhow!("can't share a remote project"))
764 }
765 })?;
766
767 rpc.send(proto::UnshareProject { project_id })?;
768 this.update(&mut cx, |this, cx| {
769 this.collaborators.clear();
770 this.shared_buffers.clear();
771 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
772 worktree.update(cx, |worktree, _| {
773 worktree.as_local_mut().unwrap().unshare();
774 });
775 }
776 cx.notify()
777 });
778 Ok(())
779 })
780 }
781
    /// Called on a remote project when the host stops sharing (or the
    /// connection drops): marks the project read-only and clears the
    /// collaborator list. No-op for local projects.
    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }
793
794 pub fn is_read_only(&self) -> bool {
795 match &self.client_state {
796 ProjectClientState::Local { .. } => false,
797 ProjectClientState::Remote {
798 sharing_has_stopped,
799 ..
800 } => *sharing_has_stopped,
801 }
802 }
803
804 pub fn is_local(&self) -> bool {
805 match &self.client_state {
806 ProjectClientState::Local { .. } => true,
807 ProjectClientState::Remote { .. } => false,
808 }
809 }
810
    /// Reports whether this project was joined from another peer.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
814
    /// Creates a new, empty, plain-text buffer that is not yet backed by a
    /// file. Only supported on local projects; guests get an error.
    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        // Track the buffer and hook it up to language servers/diagnostics.
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
826
    /// Opens the buffer at the given project path and returns it as a generic
    /// model handle together with the project entry backing it. Fails if the
    /// opened buffer has no associated project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
843
    /// Opens the buffer at the given project path, deduplicating concurrent
    /// opens: an already-open buffer is returned immediately, an in-flight
    /// load for the same path is awaited, and otherwise a fresh local or
    /// remote load is started.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to every waiter via the watch channel.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load task publishes a result on the watch channel.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
904
    /// Loads a buffer from disk via the local worktree, then registers it
    /// with the project (language assignment, language server, etc.).
    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
921
    /// Requests a buffer's contents from the host over RPC and deserializes
    /// the response into a local buffer replica.
    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // Remote projects always have a remote id, so this unwrap holds.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
946
    /// Opens a buffer for a file URI referenced by a language server (e.g. a
    /// go-to-definition target outside the project). If no worktree contains
    /// the path, an invisible single-file worktree is created for it and the
    /// originating language server is associated with that worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // `false` ⇒ the new worktree is not user-visible.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree root *is* the file, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
983
    /// Saves a buffer to a new absolute path ("save as"), creating a local
    /// worktree for the path if needed, then re-assigns the buffer's language
    /// and re-registers it with the appropriate language server since its
    /// file (and possibly language) has changed.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1008
1009 pub fn get_open_buffer(
1010 &mut self,
1011 path: &ProjectPath,
1012 cx: &mut ModelContext<Self>,
1013 ) -> Option<ModelHandle<Buffer>> {
1014 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1015 self.opened_buffers.values().find_map(|buffer| {
1016 let buffer = buffer.upgrade(cx)?;
1017 let file = File::from_dyn(buffer.read(cx).file())?;
1018 if file.worktree == worktree && file.path() == &path.path {
1019 Some(buffer)
1020 } else {
1021 None
1022 }
1023 })
1024 }
1025
    /// Tracks a newly-opened buffer: stores a handle for it (strong while the
    /// project is remote or shared), replays any operations that arrived
    /// while it was loading, subscribes to its events, and hooks it up to a
    /// language and language server. Errors if a live buffer with the same
    /// remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived before the buffer finished opening are
            // applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A stale weak entry is fine; a live one is a duplicate.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }
1066
1067 fn register_buffer_with_language_server(
1068 &mut self,
1069 buffer_handle: &ModelHandle<Buffer>,
1070 cx: &mut ModelContext<Self>,
1071 ) {
1072 let buffer = buffer_handle.read(cx);
1073 let buffer_id = buffer.remote_id();
1074 if let Some(file) = File::from_dyn(buffer.file()) {
1075 if file.is_local() {
1076 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1077 let initial_snapshot = buffer.text_snapshot();
1078 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1079
1080 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1081 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1082 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1083 .log_err();
1084 }
1085 }
1086
1087 if let Some(server) = language_server {
1088 server
1089 .notify::<lsp::notification::DidOpenTextDocument>(
1090 lsp::DidOpenTextDocumentParams {
1091 text_document: lsp::TextDocumentItem::new(
1092 uri,
1093 Default::default(),
1094 0,
1095 initial_snapshot.text(),
1096 ),
1097 }
1098 .clone(),
1099 )
1100 .log_err();
1101 buffer_handle.update(cx, |buffer, cx| {
1102 buffer.set_completion_triggers(
1103 server
1104 .capabilities()
1105 .completion_provider
1106 .as_ref()
1107 .and_then(|provider| provider.trigger_characters.clone())
1108 .unwrap_or(Vec::new()),
1109 cx,
1110 )
1111 });
1112 self.buffer_snapshots
1113 .insert(buffer_id, vec![(0, initial_snapshot)]);
1114 }
1115
1116 cx.observe_release(buffer_handle, |this, buffer, cx| {
1117 if let Some(file) = File::from_dyn(buffer.file()) {
1118 if file.is_local() {
1119 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1120 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1121 server
1122 .notify::<lsp::notification::DidCloseTextDocument>(
1123 lsp::DidCloseTextDocumentParams {
1124 text_document: lsp::TextDocumentIdentifier::new(
1125 uri.clone(),
1126 ),
1127 },
1128 )
1129 .log_err();
1130 }
1131 }
1132 }
1133 })
1134 .detach();
1135 }
1136 }
1137 }
1138
    /// Reacts to a change in one of the opened buffers: relays CRDT
    /// operations to collaborators, and keeps the buffer's language server(s)
    /// informed about edits and saves.
    ///
    /// The `Option<()>` return exists only so `?` can be used for early
    /// exits; the value itself carries no meaning for callers.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                // Forward the operation to the collaboration server; bails
                // early if this project has no remote id (not shared).
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited => {
                // Send an incremental `textDocument/didChange`, diffing the
                // buffer against the last snapshot we sent to the server.
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                // Translate each edit since the previous snapshot into an LSP
                // content change: the range is expressed in the coordinates
                // expected by the server, while the replacement text is taken
                // from the new snapshot.
                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                // Record the snapshot we just described, so the next edit is
                // diffed against it.
                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                // Tell every language server running for this buffer's
                // worktree that the file was saved to disk.
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }
1225
1226 fn language_servers_for_worktree(
1227 &self,
1228 worktree_id: WorktreeId,
1229 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1230 self.language_servers.iter().filter_map(
1231 move |((language_server_worktree_id, language_name), server)| {
1232 if *language_server_worktree_id == worktree_id {
1233 Some((language_name.as_ref(), server))
1234 } else {
1235 None
1236 }
1237 },
1238 )
1239 }
1240
1241 fn assign_language_to_buffer(
1242 &mut self,
1243 buffer: &ModelHandle<Buffer>,
1244 cx: &mut ModelContext<Self>,
1245 ) -> Option<()> {
1246 // If the buffer has a language, set it and start the language server if we haven't already.
1247 let full_path = buffer.read(cx).file()?.full_path(cx);
1248 let language = self.languages.select_language(&full_path)?;
1249 buffer.update(cx, |buffer, cx| {
1250 buffer.set_language(Some(language.clone()), cx);
1251 });
1252
1253 let file = File::from_dyn(buffer.read(cx).file())?;
1254 let worktree = file.worktree.read(cx).as_local()?;
1255 let worktree_id = worktree.id();
1256 let worktree_abs_path = worktree.abs_path().clone();
1257 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1258
1259 None
1260 }
1261
    /// Ensures a language server is running for `language` in the given
    /// worktree. The server is started at most once per
    /// `(worktree, language)` key; the async startup performs the full
    /// handshake: wiring up notification/request handlers, initializing the
    /// server, sending the current configuration, advertising the server to
    /// collaborators, and opening every already-open matching buffer on it.
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let key = (worktree_id, language.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    // Server->client messages are funneled through this
                    // channel and handled serially by `on_lsp_event`.
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    // Forward published diagnostics into the event channel.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    // Answer `workspace/configuration` requests from the
                    // current language-server settings, per requested section.
                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params| {
                                let settings = settings.lock();
                                Ok(params
                                    .items
                                    .into_iter()
                                    .map(|item| {
                                        if let Some(section) = &item.section {
                                            settings
                                                .get(section)
                                                .cloned()
                                                .unwrap_or(serde_json::Value::Null)
                                        } else {
                                            settings.clone()
                                        }
                                    })
                                    .collect())
                            }
                        })
                        .detach();

                    // Translate `$/progress` notifications into
                    // WorkStart/WorkProgress/WorkEnd events. Only string
                    // tokens are supported; numeric tokens are skipped.
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                    last_update_at: Instant::now(),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });

                                // Don't starve the main thread when lots of events arrive all at once.
                                smol::future::yield_now().await;
                            }
                            Some(())
                        }
                    })
                    .detach();

                    // Complete the LSP `initialize` handshake before touching
                    // project state.
                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        // Push the current settings right after initialization.
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        // Advertise the new server to collaborators, if shared.
                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                let file = file.as_local()?;
                                // Reuse the latest snapshot we have for this
                                // buffer, or seed one at version 0.
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }
1473
    /// Dispatches one event from a language server's event channel: tracks
    /// work-progress units, special-cases the language's disk-based
    /// diagnostics progress token (counting nested begin/end pairs), and
    /// broadcasts the corresponding update to collaborators.
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                // Unknown server id (e.g. already removed): nothing to track.
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Count overlapping disk-based diagnostic passes; only the
                    // first one transitions the project into "updating".
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                // Progress for the disk-diagnostics token is intentionally
                // ignored; only begin/end matter for that token.
                if Some(&token) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(&token) == disk_diagnostics_token {
                    // Mirror of WorkStart: only the last outstanding pass
                    // transitions the project out of "updating".
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                // Let the language post-process raw LSP diagnostics first.
                language.process_diagnostics(&mut params);

                // Languages without a dedicated disk-diagnostics progress
                // token treat every diagnostics update as a complete
                // started/finished cycle.
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(
                    params,
                    language
                        .disk_based_diagnostic_sources()
                        .unwrap_or(&Default::default()),
                    cx,
                )
                .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }
1586
1587 fn on_lsp_work_start(
1588 &mut self,
1589 language_server_id: usize,
1590 token: String,
1591 cx: &mut ModelContext<Self>,
1592 ) {
1593 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1594 status.pending_work.insert(
1595 token,
1596 LanguageServerProgress {
1597 message: None,
1598 percentage: None,
1599 last_update_at: Instant::now(),
1600 },
1601 );
1602 cx.notify();
1603 }
1604 }
1605
1606 fn on_lsp_work_progress(
1607 &mut self,
1608 language_server_id: usize,
1609 token: String,
1610 progress: LanguageServerProgress,
1611 cx: &mut ModelContext<Self>,
1612 ) {
1613 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1614 status.pending_work.insert(token, progress);
1615 cx.notify();
1616 }
1617 }
1618
1619 fn on_lsp_work_end(
1620 &mut self,
1621 language_server_id: usize,
1622 token: String,
1623 cx: &mut ModelContext<Self>,
1624 ) {
1625 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1626 status.pending_work.remove(&token);
1627 cx.notify();
1628 }
1629 }
1630
1631 fn broadcast_language_server_update(
1632 &self,
1633 language_server_id: usize,
1634 event: proto::update_language_server::Variant,
1635 ) {
1636 if let Some(project_id) = self.remote_id() {
1637 self.client
1638 .send(proto::UpdateLanguageServer {
1639 project_id,
1640 language_server_id: language_server_id as u64,
1641 variant: Some(event),
1642 })
1643 .log_err();
1644 }
1645 }
1646
1647 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1648 for server in self.language_servers.values() {
1649 server
1650 .notify::<lsp::notification::DidChangeConfiguration>(
1651 lsp::DidChangeConfigurationParams {
1652 settings: settings.clone(),
1653 },
1654 )
1655 .ok();
1656 }
1657 *self.language_server_settings.lock() = settings;
1658 }
1659
    /// Returns the status (server name plus pending work) of every language
    /// server started for this project. The `DoubleEndedIterator` bound
    /// implies the backing map is ordered; presumably iteration is in
    /// ascending server-id order — confirm against the field's declaration.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
1665
1666 pub fn update_diagnostics(
1667 &mut self,
1668 params: lsp::PublishDiagnosticsParams,
1669 disk_based_sources: &HashSet<String>,
1670 cx: &mut ModelContext<Self>,
1671 ) -> Result<()> {
1672 let abs_path = params
1673 .uri
1674 .to_file_path()
1675 .map_err(|_| anyhow!("URI is not a file"))?;
1676 let mut next_group_id = 0;
1677 let mut diagnostics = Vec::default();
1678 let mut primary_diagnostic_group_ids = HashMap::default();
1679 let mut sources_by_group_id = HashMap::default();
1680 let mut supporting_diagnostics = HashMap::default();
1681 for diagnostic in ¶ms.diagnostics {
1682 let source = diagnostic.source.as_ref();
1683 let code = diagnostic.code.as_ref().map(|code| match code {
1684 lsp::NumberOrString::Number(code) => code.to_string(),
1685 lsp::NumberOrString::String(code) => code.clone(),
1686 });
1687 let range = range_from_lsp(diagnostic.range);
1688 let is_supporting = diagnostic
1689 .related_information
1690 .as_ref()
1691 .map_or(false, |infos| {
1692 infos.iter().any(|info| {
1693 primary_diagnostic_group_ids.contains_key(&(
1694 source,
1695 code.clone(),
1696 range_from_lsp(info.location.range),
1697 ))
1698 })
1699 });
1700
1701 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1702 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1703 });
1704
1705 if is_supporting {
1706 supporting_diagnostics.insert(
1707 (source, code.clone(), range),
1708 (diagnostic.severity, is_unnecessary),
1709 );
1710 } else {
1711 let group_id = post_inc(&mut next_group_id);
1712 let is_disk_based =
1713 source.map_or(false, |source| disk_based_sources.contains(source));
1714
1715 sources_by_group_id.insert(group_id, source);
1716 primary_diagnostic_group_ids
1717 .insert((source, code.clone(), range.clone()), group_id);
1718
1719 diagnostics.push(DiagnosticEntry {
1720 range,
1721 diagnostic: Diagnostic {
1722 code: code.clone(),
1723 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1724 message: diagnostic.message.clone(),
1725 group_id,
1726 is_primary: true,
1727 is_valid: true,
1728 is_disk_based,
1729 is_unnecessary,
1730 },
1731 });
1732 if let Some(infos) = &diagnostic.related_information {
1733 for info in infos {
1734 if info.location.uri == params.uri && !info.message.is_empty() {
1735 let range = range_from_lsp(info.location.range);
1736 diagnostics.push(DiagnosticEntry {
1737 range,
1738 diagnostic: Diagnostic {
1739 code: code.clone(),
1740 severity: DiagnosticSeverity::INFORMATION,
1741 message: info.message.clone(),
1742 group_id,
1743 is_primary: false,
1744 is_valid: true,
1745 is_disk_based,
1746 is_unnecessary: false,
1747 },
1748 });
1749 }
1750 }
1751 }
1752 }
1753 }
1754
1755 for entry in &mut diagnostics {
1756 let diagnostic = &mut entry.diagnostic;
1757 if !diagnostic.is_primary {
1758 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1759 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1760 source,
1761 diagnostic.code.clone(),
1762 entry.range.clone(),
1763 )) {
1764 if let Some(severity) = severity {
1765 diagnostic.severity = severity;
1766 }
1767 diagnostic.is_unnecessary = is_unnecessary;
1768 }
1769 }
1770 }
1771
1772 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1773 Ok(())
1774 }
1775
    /// Applies a set of diagnostics for `abs_path` to the matching open
    /// buffer (if any) and to the containing local worktree, then emits
    /// `Event::DiagnosticsUpdated`.
    ///
    /// Diagnostics for paths in non-visible worktrees are silently dropped.
    /// `version` is the LSP document version the diagnostics were computed
    /// against, if the server provided one.
    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // At most one open buffer can match this path; update it and stop.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        // The worktree keeps the per-path diagnostics so they survive the
        // buffer being closed and reopened.
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }
1816
    /// Sorts, sanitizes, and installs `diagnostics` on `buffer`.
    ///
    /// Diagnostics marked disk-based have their ranges shifted through any
    /// unsaved edits (diagnostics overlapping an unsaved edit are dropped),
    /// all ranges are clipped to valid buffer positions, and empty ranges
    /// are widened to one character so they remain visible.
    ///
    /// `version` selects which previously-sent LSP snapshot the positions
    /// refer to; see `buffer_snapshot_for_lsp_version`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break order for diagnostics sharing a range: primaries first,
        // then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Order by start ascending, end descending (outermost range first).
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let mut edits_since_save = snapshot
            .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
            .peekable();
        // Running translation between on-disk and in-buffer coordinates,
        // advanced as we walk the (sorted) edits alongside the diagnostics.
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        'outer: for entry in diagnostics {
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry.diagnostic.is_disk_based {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        // Edit is entirely before the diagnostic: advance the
                        // coordinate translation and consume it.
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        // Edit overlaps the diagnostic: the on-disk range no
                        // longer corresponds to buffer text, so drop it.
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                let start_overshoot = start - last_edit_old_end;
                start = last_edit_new_end;
                start += start_overshoot;

                let end_overshoot = end - last_edit_old_end;
                end = last_edit_new_end;
                end += end_overshoot;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        // Release the borrow of `snapshot` before moving it into the set.
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
1900
    /// Formats the given buffers, returning the edits grouped into a
    /// `ProjectTransaction`.
    ///
    /// Remote buffers are formatted by the project host via a single
    /// `FormatBuffers` request; local buffers are formatted through their
    /// language server using `textDocument/formatting`, falling back to
    /// `textDocument/rangeFormatting` over the whole document when only
    /// range formatting is advertised. Buffers without a language server or
    /// a file are skipped. When `push_to_history` is false, the resulting
    /// transactions are forgotten from the buffers' undo histories.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // A buffer with no file can't be formatted; give up entirely.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // Format the remote buffers first, with one batched request.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Prefer whole-document formatting; fall back to formatting
                // the full document range when only range formatting exists.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all edits inside a single transaction so the
                    // format is one undo step.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2015
2016 pub fn definition<T: ToPointUtf16>(
2017 &self,
2018 buffer: &ModelHandle<Buffer>,
2019 position: T,
2020 cx: &mut ModelContext<Self>,
2021 ) -> Task<Result<Vec<Location>>> {
2022 let position = position.to_point_utf16(buffer.read(cx));
2023 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2024 }
2025
2026 pub fn references<T: ToPointUtf16>(
2027 &self,
2028 buffer: &ModelHandle<Buffer>,
2029 position: T,
2030 cx: &mut ModelContext<Self>,
2031 ) -> Task<Result<Vec<Location>>> {
2032 let position = position.to_point_utf16(buffer.read(cx));
2033 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2034 }
2035
2036 pub fn document_highlights<T: ToPointUtf16>(
2037 &self,
2038 buffer: &ModelHandle<Buffer>,
2039 position: T,
2040 cx: &mut ModelContext<Self>,
2041 ) -> Task<Result<Vec<DocumentHighlight>>> {
2042 let position = position.to_point_utf16(buffer.read(cx));
2043
2044 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2045 }
2046
    /// Searches for workspace symbols matching `query`.
    ///
    /// Locally, fans a `workspace/symbol` request out to every distinct
    /// language server (deduplicated by server pointer, since one server may
    /// back several worktree/language keys) and maps each result back to a
    /// project path. Remotely, forwards the query to the project host.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate servers: key on the server's pointer identity and
            // remember which worktree/language it was registered under.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // `values()` iteration order matches the order the
                        // requests were pushed, so zip pairs each response
                        // with its originating server's metadata.
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    // Prefer resolving the symbol into one of
                                    // the project's own worktrees; otherwise
                                    // express it relative to the server's
                                    // worktree root.
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2150
2151 pub fn open_buffer_for_symbol(
2152 &mut self,
2153 symbol: &Symbol,
2154 cx: &mut ModelContext<Self>,
2155 ) -> Task<Result<ModelHandle<Buffer>>> {
2156 if self.is_local() {
2157 let language_server = if let Some(server) = self.language_servers.get(&(
2158 symbol.source_worktree_id,
2159 Arc::from(symbol.language_name.as_str()),
2160 )) {
2161 server.clone()
2162 } else {
2163 return Task::ready(Err(anyhow!(
2164 "language server for worktree and language not found"
2165 )));
2166 };
2167
2168 let worktree_abs_path = if let Some(worktree_abs_path) = self
2169 .worktree_for_id(symbol.worktree_id, cx)
2170 .and_then(|worktree| worktree.read(cx).as_local())
2171 .map(|local_worktree| local_worktree.abs_path())
2172 {
2173 worktree_abs_path
2174 } else {
2175 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2176 };
2177 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2178 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2179 uri
2180 } else {
2181 return Task::ready(Err(anyhow!("invalid symbol path")));
2182 };
2183
2184 self.open_local_buffer_via_lsp(
2185 symbol_uri,
2186 Arc::from(symbol.language_name.as_str()),
2187 language_server,
2188 cx,
2189 )
2190 } else if let Some(project_id) = self.remote_id() {
2191 let request = self.client.request(proto::OpenBufferForSymbol {
2192 project_id,
2193 symbol: Some(serialize_symbol(symbol)),
2194 });
2195 cx.spawn(|this, mut cx| async move {
2196 let response = request.await?;
2197 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2198 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2199 .await
2200 })
2201 } else {
2202 Task::ready(Err(anyhow!("project does not have a remote id")))
2203 }
2204 }
2205
    /// Requests code completions at `position` in the given buffer.
    ///
    /// For a local worktree this sends a `textDocument/completion` request to
    /// the buffer's language server; for a remote project it forwards a
    /// `GetCompletions` RPC to the host. Resolves to an empty list when the
    /// buffer has no backing file or no language server is available.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // A buffer without a file has nothing to complete against.
            return Task::ready(Ok(Default::default()));
        };

        // Anchor the cursor position so it stays meaningful if the buffer
        // changes while a (remote) request is in flight.
        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The LSP response may be a bare array or a (possibly
                // incomplete) list; both are flattened into a plain Vec.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // Completions without a `textEdit` are skipped
                            // (the `?` below); insert/replace edits are not
                            // supported yet.
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range no longer
                            // fits within the current buffer contents.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Catch the local buffer up to the version the host computed
                // completions against before deserializing anchors.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2330
    /// Applies any `additionalTextEdits` attached to `completion` (obtained by
    /// resolving it via `completionItem/resolve`), returning the resulting
    /// transaction if any edits were made.
    ///
    /// When `push_to_history` is false the transaction is forgotten locally,
    /// keeping it out of the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into one transaction,
                        // separate from whatever transaction came before.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // No-op edits produce no transaction.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated locally
                    // before exposing the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2411
2412 pub fn code_actions<T: ToOffset>(
2413 &self,
2414 buffer_handle: &ModelHandle<Buffer>,
2415 range: Range<T>,
2416 cx: &mut ModelContext<Self>,
2417 ) -> Task<Result<Vec<CodeAction>>> {
2418 let buffer_handle = buffer_handle.clone();
2419 let buffer = buffer_handle.read(cx);
2420 let buffer_id = buffer.remote_id();
2421 let worktree;
2422 let buffer_abs_path;
2423 if let Some(file) = File::from_dyn(buffer.file()) {
2424 worktree = file.worktree.clone();
2425 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2426 } else {
2427 return Task::ready(Ok(Default::default()));
2428 };
2429 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2430
2431 if worktree.read(cx).as_local().is_some() {
2432 let buffer_abs_path = buffer_abs_path.unwrap();
2433 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2434 server.clone()
2435 } else {
2436 return Task::ready(Ok(Default::default()));
2437 };
2438
2439 let lsp_range = lsp::Range::new(
2440 range.start.to_point_utf16(buffer).to_lsp_position(),
2441 range.end.to_point_utf16(buffer).to_lsp_position(),
2442 );
2443 cx.foreground().spawn(async move {
2444 if !lang_server.capabilities().code_action_provider.is_some() {
2445 return Ok(Default::default());
2446 }
2447
2448 Ok(lang_server
2449 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2450 text_document: lsp::TextDocumentIdentifier::new(
2451 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2452 ),
2453 range: lsp_range,
2454 work_done_progress_params: Default::default(),
2455 partial_result_params: Default::default(),
2456 context: lsp::CodeActionContext {
2457 diagnostics: Default::default(),
2458 only: Some(vec![
2459 lsp::CodeActionKind::QUICKFIX,
2460 lsp::CodeActionKind::REFACTOR,
2461 lsp::CodeActionKind::REFACTOR_EXTRACT,
2462 ]),
2463 },
2464 })
2465 .await?
2466 .unwrap_or_default()
2467 .into_iter()
2468 .filter_map(|entry| {
2469 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2470 Some(CodeAction {
2471 range: range.clone(),
2472 lsp_action,
2473 })
2474 } else {
2475 None
2476 }
2477 })
2478 .collect())
2479 })
2480 } else if let Some(project_id) = self.remote_id() {
2481 let rpc = self.client.clone();
2482 let version = buffer.version();
2483 cx.spawn_weak(|_, mut cx| async move {
2484 let response = rpc
2485 .request(proto::GetCodeActions {
2486 project_id,
2487 buffer_id,
2488 start: Some(language::proto::serialize_anchor(&range.start)),
2489 end: Some(language::proto::serialize_anchor(&range.end)),
2490 version: serialize_version(&version),
2491 })
2492 .await?;
2493
2494 buffer_handle
2495 .update(&mut cx, |buffer, _| {
2496 buffer.wait_for_version(deserialize_version(response.version))
2497 })
2498 .await;
2499
2500 response
2501 .actions
2502 .into_iter()
2503 .map(language::proto::deserialize_code_action)
2504 .collect()
2505 })
2506 } else {
2507 Task::ready(Ok(Default::default()))
2508 }
2509 }
2510
    /// Applies `action` to the project, resolving it with the language server
    /// first if necessary, and returns the transactions produced across all
    /// affected buffers.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action carries unresolved server data: refresh the
                    // embedded range (the buffer may have changed since the
                    // action was listed) and ask the server to resolve it.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-request the actions for this range
                    // and pick the one matching ours.
                    // NOTE(review): matching by title assumes titles are
                    // unique within a response — confirm.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
2597
    /// Applies an LSP `WorkspaceEdit` to the project: performs any file
    /// create/rename/delete resource operations and applies text edits to the
    /// affected buffers, returning the per-buffer transactions.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        language_name: Arc<str>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        // Normalize both encodings of a workspace edit (the newer
        // `documentChanges` and the legacy `changes` map) into a single list
        // of document-change operations.
        let mut operations = Vec::new();
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, not string suffixes, so this condition only
                    // holds for the root path "/". If the intent is "the URI
                    // denotes a directory", this likely needs to inspect the
                    // original URI string instead — confirm.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with` caveat as
                    // in the Create arm above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open the target through the LSP path so edits apply to
                    // the in-memory buffer rather than going behind its back.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_name.clone(),
                                language_server.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    // Group this document's edits into a single transaction,
                    // forgetting it when the caller doesn't want it in the
                    // undo history.
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
2728
2729 pub fn prepare_rename<T: ToPointUtf16>(
2730 &self,
2731 buffer: ModelHandle<Buffer>,
2732 position: T,
2733 cx: &mut ModelContext<Self>,
2734 ) -> Task<Result<Option<Range<Anchor>>>> {
2735 let position = position.to_point_utf16(buffer.read(cx));
2736 self.request_lsp(buffer, PrepareRename { position }, cx)
2737 }
2738
2739 pub fn perform_rename<T: ToPointUtf16>(
2740 &self,
2741 buffer: ModelHandle<Buffer>,
2742 position: T,
2743 new_name: String,
2744 push_to_history: bool,
2745 cx: &mut ModelContext<Self>,
2746 ) -> Task<Result<ProjectTransaction>> {
2747 let position = position.to_point_utf16(buffer.read(cx));
2748 self.request_lsp(
2749 buffer,
2750 PerformRename {
2751 position,
2752 new_name,
2753 push_to_history,
2754 },
2755 cx,
2756 )
2757 }
2758
    /// Searches all visible worktrees (and already-open buffers) for `query`,
    /// returning each matching buffer along with the anchor ranges of its
    /// matches.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Phase 1: fan the visible file list out across background
            // workers that scan file contents on disk, streaming every path
            // that contains a match into `matching_paths_tx`.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    // Each worker owns a contiguous slice of
                                    // the global file index, which may span
                                    // multiple worktree snapshots.
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Stop early if the consumer went away.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per worker
                                                    // instead of allocating per file.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Phase 2: seed the pipeline with buffers that are already open
            // (their in-memory contents may differ from disk), then open a
            // buffer for each matching path as it arrives, deduplicating via
            // the `open_buffers` set.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Only forward buffers we haven't searched yet.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Phase 3: run the query against each buffer snapshot in
            // parallel, collecting anchor ranges per buffer; each worker
            // keeps its own map and the maps are merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote projects delegate the whole search to the host.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
2961
    /// Issues the given `LspCommand` for `buffer_handle`: directly against the
    /// buffer's language server when local, or via RPC to the host when
    /// remote. Resolves to the response type's default when no server or
    /// remote is available, or when the server lacks the required capability.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) =
                file.zip(self.language_server_for_buffer(buffer, cx).cloned())
            {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Capability check happens inside the task so unsupported
                    // requests still resolve (to a default) rather than error.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
3004
3005 pub fn find_or_create_local_worktree(
3006 &mut self,
3007 abs_path: impl AsRef<Path>,
3008 visible: bool,
3009 cx: &mut ModelContext<Self>,
3010 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3011 let abs_path = abs_path.as_ref();
3012 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3013 Task::ready(Ok((tree.clone(), relative_path.into())))
3014 } else {
3015 let worktree = self.create_local_worktree(abs_path, visible, cx);
3016 cx.foreground()
3017 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3018 }
3019 }
3020
3021 pub fn find_local_worktree(
3022 &self,
3023 abs_path: &Path,
3024 cx: &AppContext,
3025 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3026 for tree in self.worktrees(cx) {
3027 if let Some(relative_path) = tree
3028 .read(cx)
3029 .as_local()
3030 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3031 {
3032 return Some((tree.clone(), relative_path.into()));
3033 }
3034 }
3035 None
3036 }
3037
3038 pub fn is_shared(&self) -> bool {
3039 match &self.client_state {
3040 ProjectClientState::Local { is_shared, .. } => *is_shared,
3041 ProjectClientState::Remote { .. } => false,
3042 }
3043 }
3044
    /// Creates a local worktree rooted at `abs_path`, or joins an already
    /// in-flight creation for the same path. Concurrent callers share one
    /// loading task via `loading_local_worktrees`.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        // Memoize the in-flight creation. The error is wrapped in an Arc so
        // the shared future's output is cloneable across awaiters.
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the memoized entry before inspecting the
                        // result so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let (remote_project_id, is_shared) =
                            project.update(&mut cx, |project, cx| {
                                project.add_worktree(&worktree, cx);
                                (project.remote_id(), project.is_shared())
                            });

                        // Advertise the new worktree to the server when this
                        // project has a remote id.
                        if let Some(project_id) = remote_project_id {
                            if is_shared {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().share(project_id, cx)
                                    })
                                    .await?;
                            } else {
                                worktree
                                    .update(&mut cx, |worktree, cx| {
                                        worktree.as_local_mut().unwrap().register(project_id, cx)
                                    })
                                    .await?;
                            }
                        }

                        Ok(worktree)
                    }
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        // Unwrap the shared result, converting the Arc'd error back into a
        // plain anyhow error for this caller.
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
3111
3112 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3113 self.worktrees.retain(|worktree| {
3114 worktree
3115 .upgrade(cx)
3116 .map_or(false, |w| w.read(cx).id() != id)
3117 });
3118 cx.notify();
3119 }
3120
3121 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3122 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3123 if worktree.read(cx).is_local() {
3124 cx.subscribe(&worktree, |this, worktree, _, cx| {
3125 this.update_local_worktree_buffers(worktree, cx);
3126 })
3127 .detach();
3128 }
3129
3130 let push_strong_handle = {
3131 let worktree = worktree.read(cx);
3132 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3133 };
3134 if push_strong_handle {
3135 self.worktrees
3136 .push(WorktreeHandle::Strong(worktree.clone()));
3137 } else {
3138 cx.observe_release(&worktree, |this, _, cx| {
3139 this.worktrees
3140 .retain(|worktree| worktree.upgrade(cx).is_some());
3141 cx.notify();
3142 })
3143 .detach();
3144 self.worktrees
3145 .push(WorktreeHandle::Weak(worktree.downgrade()));
3146 }
3147 cx.notify();
3148 }
3149
3150 fn update_local_worktree_buffers(
3151 &mut self,
3152 worktree_handle: ModelHandle<Worktree>,
3153 cx: &mut ModelContext<Self>,
3154 ) {
3155 let snapshot = worktree_handle.read(cx).snapshot();
3156 let mut buffers_to_delete = Vec::new();
3157 for (buffer_id, buffer) in &self.opened_buffers {
3158 if let Some(buffer) = buffer.upgrade(cx) {
3159 buffer.update(cx, |buffer, cx| {
3160 if let Some(old_file) = File::from_dyn(buffer.file()) {
3161 if old_file.worktree != worktree_handle {
3162 return;
3163 }
3164
3165 let new_file = if let Some(entry) = old_file
3166 .entry_id
3167 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3168 {
3169 File {
3170 is_local: true,
3171 entry_id: Some(entry.id),
3172 mtime: entry.mtime,
3173 path: entry.path.clone(),
3174 worktree: worktree_handle.clone(),
3175 }
3176 } else if let Some(entry) =
3177 snapshot.entry_for_path(old_file.path().as_ref())
3178 {
3179 File {
3180 is_local: true,
3181 entry_id: Some(entry.id),
3182 mtime: entry.mtime,
3183 path: entry.path.clone(),
3184 worktree: worktree_handle.clone(),
3185 }
3186 } else {
3187 File {
3188 is_local: true,
3189 entry_id: None,
3190 path: old_file.path().clone(),
3191 mtime: old_file.mtime(),
3192 worktree: worktree_handle.clone(),
3193 }
3194 };
3195
3196 if let Some(project_id) = self.remote_id() {
3197 self.client
3198 .send(proto::UpdateBufferFile {
3199 project_id,
3200 buffer_id: *buffer_id as u64,
3201 file: Some(new_file.to_proto()),
3202 })
3203 .log_err();
3204 }
3205 buffer.file_updated(Box::new(new_file), cx).detach();
3206 }
3207 });
3208 } else {
3209 buffers_to_delete.push(*buffer_id);
3210 }
3211 }
3212
3213 for buffer_id in buffers_to_delete {
3214 self.opened_buffers.remove(&buffer_id);
3215 }
3216 }
3217
3218 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3219 let new_active_entry = entry.and_then(|project_path| {
3220 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3221 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3222 Some(entry.id)
3223 });
3224 if new_active_entry != self.active_entry {
3225 self.active_entry = new_active_entry;
3226 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3227 }
3228 }
3229
    /// Whether any language server is currently running a disk-based
    /// diagnostics pass.
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
3233
3234 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3235 let mut summary = DiagnosticSummary::default();
3236 for (_, path_summary) in self.diagnostic_summaries(cx) {
3237 summary.error_count += path_summary.error_count;
3238 summary.warning_count += path_summary.warning_count;
3239 summary.info_count += path_summary.info_count;
3240 summary.hint_count += path_summary.hint_count;
3241 }
3242 summary
3243 }
3244
    /// Iterates over the diagnostic summaries of every path in every
    /// worktree, tagging each with its `ProjectPath`.
    pub fn diagnostic_summaries<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
        self.worktrees(cx).flat_map(move |worktree| {
            let worktree = worktree.read(cx);
            let worktree_id = worktree.id();
            worktree
                .diagnostic_summaries()
                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
        })
    }
3257
3258 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3259 self.language_servers_with_diagnostics_running += 1;
3260 if self.language_servers_with_diagnostics_running == 1 {
3261 cx.emit(Event::DiskBasedDiagnosticsStarted);
3262 }
3263 }
3264
3265 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3266 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3267 self.language_servers_with_diagnostics_running -= 1;
3268 if self.language_servers_with_diagnostics_running == 0 {
3269 cx.emit(Event::DiskBasedDiagnosticsFinished);
3270 }
3271 }
3272
    /// The id of the worktree entry most recently marked active via
    /// `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
3276
3277 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3278 self.worktree_for_id(path.worktree_id, cx)?
3279 .read(cx)
3280 .entry_for_path(&path.path)
3281 .map(|entry| entry.id)
3282 }
3283
3284 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3285 let worktree = self.worktree_for_entry(entry_id, cx)?;
3286 let worktree = worktree.read(cx);
3287 let worktree_id = worktree.id();
3288 let path = worktree.entry_for_id(entry_id)?.path.clone();
3289 Some(ProjectPath { worktree_id, path })
3290 }
3291
3292 // RPC message handlers
3293
3294 async fn handle_unshare_project(
3295 this: ModelHandle<Self>,
3296 _: TypedEnvelope<proto::UnshareProject>,
3297 _: Arc<Client>,
3298 mut cx: AsyncAppContext,
3299 ) -> Result<()> {
3300 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3301 Ok(())
3302 }
3303
3304 async fn handle_add_collaborator(
3305 this: ModelHandle<Self>,
3306 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3307 _: Arc<Client>,
3308 mut cx: AsyncAppContext,
3309 ) -> Result<()> {
3310 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3311 let collaborator = envelope
3312 .payload
3313 .collaborator
3314 .take()
3315 .ok_or_else(|| anyhow!("empty collaborator"))?;
3316
3317 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3318 this.update(&mut cx, |this, cx| {
3319 this.collaborators
3320 .insert(collaborator.peer_id, collaborator);
3321 cx.notify();
3322 });
3323
3324 Ok(())
3325 }
3326
3327 async fn handle_remove_collaborator(
3328 this: ModelHandle<Self>,
3329 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3330 _: Arc<Client>,
3331 mut cx: AsyncAppContext,
3332 ) -> Result<()> {
3333 this.update(&mut cx, |this, cx| {
3334 let peer_id = PeerId(envelope.payload.peer_id);
3335 let replica_id = this
3336 .collaborators
3337 .remove(&peer_id)
3338 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3339 .replica_id;
3340 for (_, buffer) in &this.opened_buffers {
3341 if let Some(buffer) = buffer.upgrade(cx) {
3342 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3343 }
3344 }
3345 cx.notify();
3346 Ok(())
3347 })
3348 }
3349
3350 async fn handle_register_worktree(
3351 this: ModelHandle<Self>,
3352 envelope: TypedEnvelope<proto::RegisterWorktree>,
3353 client: Arc<Client>,
3354 mut cx: AsyncAppContext,
3355 ) -> Result<()> {
3356 this.update(&mut cx, |this, cx| {
3357 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3358 let replica_id = this.replica_id();
3359 let worktree = proto::Worktree {
3360 id: envelope.payload.worktree_id,
3361 root_name: envelope.payload.root_name,
3362 entries: Default::default(),
3363 diagnostic_summaries: Default::default(),
3364 visible: envelope.payload.visible,
3365 };
3366 let (worktree, load_task) =
3367 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3368 this.add_worktree(&worktree, cx);
3369 load_task.detach();
3370 Ok(())
3371 })
3372 }
3373
3374 async fn handle_unregister_worktree(
3375 this: ModelHandle<Self>,
3376 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3377 _: Arc<Client>,
3378 mut cx: AsyncAppContext,
3379 ) -> Result<()> {
3380 this.update(&mut cx, |this, cx| {
3381 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3382 this.remove_worktree(worktree_id, cx);
3383 Ok(())
3384 })
3385 }
3386
3387 async fn handle_update_worktree(
3388 this: ModelHandle<Self>,
3389 envelope: TypedEnvelope<proto::UpdateWorktree>,
3390 _: Arc<Client>,
3391 mut cx: AsyncAppContext,
3392 ) -> Result<()> {
3393 this.update(&mut cx, |this, cx| {
3394 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3395 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3396 worktree.update(cx, |worktree, _| {
3397 let worktree = worktree.as_remote_mut().unwrap();
3398 worktree.update_from_remote(envelope)
3399 })?;
3400 }
3401 Ok(())
3402 })
3403 }
3404
3405 async fn handle_update_diagnostic_summary(
3406 this: ModelHandle<Self>,
3407 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3408 _: Arc<Client>,
3409 mut cx: AsyncAppContext,
3410 ) -> Result<()> {
3411 this.update(&mut cx, |this, cx| {
3412 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3413 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3414 if let Some(summary) = envelope.payload.summary {
3415 let project_path = ProjectPath {
3416 worktree_id,
3417 path: Path::new(&summary.path).into(),
3418 };
3419 worktree.update(cx, |worktree, _| {
3420 worktree
3421 .as_remote_mut()
3422 .unwrap()
3423 .update_diagnostic_summary(project_path.path.clone(), &summary);
3424 });
3425 cx.emit(Event::DiagnosticsUpdated(project_path));
3426 }
3427 }
3428 Ok(())
3429 })
3430 }
3431
3432 async fn handle_start_language_server(
3433 this: ModelHandle<Self>,
3434 envelope: TypedEnvelope<proto::StartLanguageServer>,
3435 _: Arc<Client>,
3436 mut cx: AsyncAppContext,
3437 ) -> Result<()> {
3438 let server = envelope
3439 .payload
3440 .server
3441 .ok_or_else(|| anyhow!("invalid server"))?;
3442 this.update(&mut cx, |this, cx| {
3443 this.language_server_statuses.insert(
3444 server.id as usize,
3445 LanguageServerStatus {
3446 name: server.name,
3447 pending_work: Default::default(),
3448 pending_diagnostic_updates: 0,
3449 },
3450 );
3451 cx.notify();
3452 });
3453 Ok(())
3454 }
3455
    /// RPC handler: mirrors language-server status updates sent by the host —
    /// progress-token lifecycle events and disk-based diagnostics
    /// start/finish notifications — into this replica's UI state.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            // A unit of server work (identified by `token`) began.
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(language_server_id, payload.token, cx);
                })
            }
            // Progress report for an in-flight unit of work.
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamp locally; the wire message carries no clock.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            // Disk-based diagnostics lifecycle mirrors the host's counter.
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
            }
        }

        Ok(())
    }
3504
3505 async fn handle_update_buffer(
3506 this: ModelHandle<Self>,
3507 envelope: TypedEnvelope<proto::UpdateBuffer>,
3508 _: Arc<Client>,
3509 mut cx: AsyncAppContext,
3510 ) -> Result<()> {
3511 this.update(&mut cx, |this, cx| {
3512 let payload = envelope.payload.clone();
3513 let buffer_id = payload.buffer_id;
3514 let ops = payload
3515 .operations
3516 .into_iter()
3517 .map(|op| language::proto::deserialize_operation(op))
3518 .collect::<Result<Vec<_>, _>>()?;
3519 match this.opened_buffers.entry(buffer_id) {
3520 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3521 OpenBuffer::Strong(buffer) => {
3522 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3523 }
3524 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3525 OpenBuffer::Weak(_) => {}
3526 },
3527 hash_map::Entry::Vacant(e) => {
3528 e.insert(OpenBuffer::Loading(ops));
3529 }
3530 }
3531 Ok(())
3532 })
3533 }
3534
3535 async fn handle_update_buffer_file(
3536 this: ModelHandle<Self>,
3537 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3538 _: Arc<Client>,
3539 mut cx: AsyncAppContext,
3540 ) -> Result<()> {
3541 this.update(&mut cx, |this, cx| {
3542 let payload = envelope.payload.clone();
3543 let buffer_id = payload.buffer_id;
3544 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3545 let worktree = this
3546 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3547 .ok_or_else(|| anyhow!("no such worktree"))?;
3548 let file = File::from_proto(file, worktree.clone(), cx)?;
3549 let buffer = this
3550 .opened_buffers
3551 .get_mut(&buffer_id)
3552 .and_then(|b| b.upgrade(cx))
3553 .ok_or_else(|| anyhow!("no such buffer"))?;
3554 buffer.update(cx, |buffer, cx| {
3555 buffer.file_updated(Box::new(file), cx).detach();
3556 });
3557 Ok(())
3558 })
3559 }
3560
3561 async fn handle_save_buffer(
3562 this: ModelHandle<Self>,
3563 envelope: TypedEnvelope<proto::SaveBuffer>,
3564 _: Arc<Client>,
3565 mut cx: AsyncAppContext,
3566 ) -> Result<proto::BufferSaved> {
3567 let buffer_id = envelope.payload.buffer_id;
3568 let requested_version = deserialize_version(envelope.payload.version);
3569
3570 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3571 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3572 let buffer = this
3573 .opened_buffers
3574 .get(&buffer_id)
3575 .map(|buffer| buffer.upgrade(cx).unwrap())
3576 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3577 Ok::<_, anyhow::Error>((project_id, buffer))
3578 })?;
3579 buffer
3580 .update(&mut cx, |buffer, _| {
3581 buffer.wait_for_version(requested_version)
3582 })
3583 .await;
3584
3585 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3586 Ok(proto::BufferSaved {
3587 project_id,
3588 buffer_id,
3589 version: serialize_version(&saved_version),
3590 mtime: Some(mtime.into()),
3591 })
3592 }
3593
3594 async fn handle_format_buffers(
3595 this: ModelHandle<Self>,
3596 envelope: TypedEnvelope<proto::FormatBuffers>,
3597 _: Arc<Client>,
3598 mut cx: AsyncAppContext,
3599 ) -> Result<proto::FormatBuffersResponse> {
3600 let sender_id = envelope.original_sender_id()?;
3601 let format = this.update(&mut cx, |this, cx| {
3602 let mut buffers = HashSet::default();
3603 for buffer_id in &envelope.payload.buffer_ids {
3604 buffers.insert(
3605 this.opened_buffers
3606 .get(buffer_id)
3607 .map(|buffer| buffer.upgrade(cx).unwrap())
3608 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3609 );
3610 }
3611 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3612 })?;
3613
3614 let project_transaction = format.await?;
3615 let project_transaction = this.update(&mut cx, |this, cx| {
3616 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3617 });
3618 Ok(proto::FormatBuffersResponse {
3619 transaction: Some(project_transaction),
3620 })
3621 }
3622
3623 async fn handle_get_completions(
3624 this: ModelHandle<Self>,
3625 envelope: TypedEnvelope<proto::GetCompletions>,
3626 _: Arc<Client>,
3627 mut cx: AsyncAppContext,
3628 ) -> Result<proto::GetCompletionsResponse> {
3629 let position = envelope
3630 .payload
3631 .position
3632 .and_then(language::proto::deserialize_anchor)
3633 .ok_or_else(|| anyhow!("invalid position"))?;
3634 let version = deserialize_version(envelope.payload.version);
3635 let buffer = this.read_with(&cx, |this, cx| {
3636 this.opened_buffers
3637 .get(&envelope.payload.buffer_id)
3638 .map(|buffer| buffer.upgrade(cx).unwrap())
3639 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3640 })?;
3641 buffer
3642 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3643 .await;
3644 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3645 let completions = this
3646 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3647 .await?;
3648
3649 Ok(proto::GetCompletionsResponse {
3650 completions: completions
3651 .iter()
3652 .map(language::proto::serialize_completion)
3653 .collect(),
3654 version: serialize_version(&version),
3655 })
3656 }
3657
3658 async fn handle_apply_additional_edits_for_completion(
3659 this: ModelHandle<Self>,
3660 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3661 _: Arc<Client>,
3662 mut cx: AsyncAppContext,
3663 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3664 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3665 let buffer = this
3666 .opened_buffers
3667 .get(&envelope.payload.buffer_id)
3668 .map(|buffer| buffer.upgrade(cx).unwrap())
3669 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3670 let language = buffer.read(cx).language();
3671 let completion = language::proto::deserialize_completion(
3672 envelope
3673 .payload
3674 .completion
3675 .ok_or_else(|| anyhow!("invalid completion"))?,
3676 language,
3677 )?;
3678 Ok::<_, anyhow::Error>(
3679 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3680 )
3681 })?;
3682
3683 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3684 transaction: apply_additional_edits
3685 .await?
3686 .as_ref()
3687 .map(language::proto::serialize_transaction),
3688 })
3689 }
3690
3691 async fn handle_get_code_actions(
3692 this: ModelHandle<Self>,
3693 envelope: TypedEnvelope<proto::GetCodeActions>,
3694 _: Arc<Client>,
3695 mut cx: AsyncAppContext,
3696 ) -> Result<proto::GetCodeActionsResponse> {
3697 let start = envelope
3698 .payload
3699 .start
3700 .and_then(language::proto::deserialize_anchor)
3701 .ok_or_else(|| anyhow!("invalid start"))?;
3702 let end = envelope
3703 .payload
3704 .end
3705 .and_then(language::proto::deserialize_anchor)
3706 .ok_or_else(|| anyhow!("invalid end"))?;
3707 let buffer = this.update(&mut cx, |this, cx| {
3708 this.opened_buffers
3709 .get(&envelope.payload.buffer_id)
3710 .map(|buffer| buffer.upgrade(cx).unwrap())
3711 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3712 })?;
3713 buffer
3714 .update(&mut cx, |buffer, _| {
3715 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3716 })
3717 .await;
3718
3719 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3720 let code_actions = this.update(&mut cx, |this, cx| {
3721 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3722 })?;
3723
3724 Ok(proto::GetCodeActionsResponse {
3725 actions: code_actions
3726 .await?
3727 .iter()
3728 .map(language::proto::serialize_code_action)
3729 .collect(),
3730 version: serialize_version(&version),
3731 })
3732 }
3733
3734 async fn handle_apply_code_action(
3735 this: ModelHandle<Self>,
3736 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3737 _: Arc<Client>,
3738 mut cx: AsyncAppContext,
3739 ) -> Result<proto::ApplyCodeActionResponse> {
3740 let sender_id = envelope.original_sender_id()?;
3741 let action = language::proto::deserialize_code_action(
3742 envelope
3743 .payload
3744 .action
3745 .ok_or_else(|| anyhow!("invalid action"))?,
3746 )?;
3747 let apply_code_action = this.update(&mut cx, |this, cx| {
3748 let buffer = this
3749 .opened_buffers
3750 .get(&envelope.payload.buffer_id)
3751 .map(|buffer| buffer.upgrade(cx).unwrap())
3752 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3753 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3754 })?;
3755
3756 let project_transaction = apply_code_action.await?;
3757 let project_transaction = this.update(&mut cx, |this, cx| {
3758 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3759 });
3760 Ok(proto::ApplyCodeActionResponse {
3761 transaction: Some(project_transaction),
3762 })
3763 }
3764
    /// Generic RPC handler for LSP requests forwarded by a guest. Looks up
    /// the target buffer, reconstructs the typed request via `T::from_proto`
    /// (which may itself await buffer state), runs it against the local
    /// language server, and serializes the response tagged with the buffer
    /// version it was computed at.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before running the request, so the response
        // reflects the buffer state the request was answered against.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3805
3806 async fn handle_get_project_symbols(
3807 this: ModelHandle<Self>,
3808 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3809 _: Arc<Client>,
3810 mut cx: AsyncAppContext,
3811 ) -> Result<proto::GetProjectSymbolsResponse> {
3812 let symbols = this
3813 .update(&mut cx, |this, cx| {
3814 this.symbols(&envelope.payload.query, cx)
3815 })
3816 .await?;
3817
3818 Ok(proto::GetProjectSymbolsResponse {
3819 symbols: symbols.iter().map(serialize_symbol).collect(),
3820 })
3821 }
3822
3823 async fn handle_search_project(
3824 this: ModelHandle<Self>,
3825 envelope: TypedEnvelope<proto::SearchProject>,
3826 _: Arc<Client>,
3827 mut cx: AsyncAppContext,
3828 ) -> Result<proto::SearchProjectResponse> {
3829 let peer_id = envelope.original_sender_id()?;
3830 let query = SearchQuery::from_proto(envelope.payload)?;
3831 let result = this
3832 .update(&mut cx, |this, cx| this.search(query, cx))
3833 .await?;
3834
3835 this.update(&mut cx, |this, cx| {
3836 let mut locations = Vec::new();
3837 for (buffer, ranges) in result {
3838 for range in ranges {
3839 let start = serialize_anchor(&range.start);
3840 let end = serialize_anchor(&range.end);
3841 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3842 locations.push(proto::Location {
3843 buffer: Some(buffer),
3844 start: Some(start),
3845 end: Some(end),
3846 });
3847 }
3848 }
3849 Ok(proto::SearchProjectResponse { locations })
3850 })
3851 }
3852
3853 async fn handle_open_buffer_for_symbol(
3854 this: ModelHandle<Self>,
3855 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3856 _: Arc<Client>,
3857 mut cx: AsyncAppContext,
3858 ) -> Result<proto::OpenBufferForSymbolResponse> {
3859 let peer_id = envelope.original_sender_id()?;
3860 let symbol = envelope
3861 .payload
3862 .symbol
3863 .ok_or_else(|| anyhow!("invalid symbol"))?;
3864 let symbol = this.read_with(&cx, |this, _| {
3865 let symbol = this.deserialize_symbol(symbol)?;
3866 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3867 if signature == symbol.signature {
3868 Ok(symbol)
3869 } else {
3870 Err(anyhow!("invalid symbol signature"))
3871 }
3872 })?;
3873 let buffer = this
3874 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3875 .await?;
3876
3877 Ok(proto::OpenBufferForSymbolResponse {
3878 buffer: Some(this.update(&mut cx, |this, cx| {
3879 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3880 })),
3881 })
3882 }
3883
3884 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3885 let mut hasher = Sha256::new();
3886 hasher.update(worktree_id.to_proto().to_be_bytes());
3887 hasher.update(path.to_string_lossy().as_bytes());
3888 hasher.update(self.nonce.to_be_bytes());
3889 hasher.finalize().as_slice().try_into().unwrap()
3890 }
3891
3892 async fn handle_open_buffer_by_path(
3893 this: ModelHandle<Self>,
3894 envelope: TypedEnvelope<proto::OpenBufferByPath>,
3895 _: Arc<Client>,
3896 mut cx: AsyncAppContext,
3897 ) -> Result<proto::OpenBufferResponse> {
3898 let peer_id = envelope.original_sender_id()?;
3899 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3900 let open_buffer = this.update(&mut cx, |this, cx| {
3901 this.open_buffer(
3902 ProjectPath {
3903 worktree_id,
3904 path: PathBuf::from(envelope.payload.path).into(),
3905 },
3906 cx,
3907 )
3908 });
3909
3910 let buffer = open_buffer.await?;
3911 this.update(&mut cx, |this, cx| {
3912 Ok(proto::OpenBufferResponse {
3913 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3914 })
3915 })
3916 }
3917
3918 fn serialize_project_transaction_for_peer(
3919 &mut self,
3920 project_transaction: ProjectTransaction,
3921 peer_id: PeerId,
3922 cx: &AppContext,
3923 ) -> proto::ProjectTransaction {
3924 let mut serialized_transaction = proto::ProjectTransaction {
3925 buffers: Default::default(),
3926 transactions: Default::default(),
3927 };
3928 for (buffer, transaction) in project_transaction.0 {
3929 serialized_transaction
3930 .buffers
3931 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3932 serialized_transaction
3933 .transactions
3934 .push(language::proto::serialize_transaction(&transaction));
3935 }
3936 serialized_transaction
3937 }
3938
    /// Reconstructs a `ProjectTransaction` received from a peer. First
    /// resolves every buffer (which may wait for buffer state to arrive),
    /// then waits for each transaction's edits to be applied locally, and
    /// optionally records each transaction in its buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // Second pass: don't return until every edit referenced by the
            // transactions has actually been applied to its buffer.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
3972
3973 fn serialize_buffer_for_peer(
3974 &mut self,
3975 buffer: &ModelHandle<Buffer>,
3976 peer_id: PeerId,
3977 cx: &AppContext,
3978 ) -> proto::Buffer {
3979 let buffer_id = buffer.read(cx).remote_id();
3980 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3981 if shared_buffers.insert(buffer_id) {
3982 proto::Buffer {
3983 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3984 }
3985 } else {
3986 proto::Buffer {
3987 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3988 }
3989 }
3990 }
3991
    /// Resolves a wire `proto::Buffer` into a local buffer model. An `Id`
    /// variant refers to a buffer the host already sent full state for; a
    /// `State` variant carries the full buffer contents and is instantiated
    /// locally.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Clone both ends of the `opened_buffer` watch: the receiver lets the
        // Id branch wait for the buffer's state to arrive; the sender lets the
        // State branch wake any such waiters once registration completes.
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The full state may still be in flight on another
                    // message; poll the map, sleeping on the watch channel
                    // between attempts.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks blocked in the Id branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
4050
    /// Reconstructs a `Symbol` from its wire representation, resolving the
    /// symbol's language (for labeling) and validating the anchors and
    /// signature fields.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): `kind` comes straight off the wire and is transmuted
        // into the symbol-kind type without range validation; an out-of-range
        // value would be undefined behavior. TODO: replace with a checked
        // conversion.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            // Fall back to a plain label when the language is unknown or has
            // no custom symbol labeling.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
4079
4080 async fn handle_buffer_saved(
4081 this: ModelHandle<Self>,
4082 envelope: TypedEnvelope<proto::BufferSaved>,
4083 _: Arc<Client>,
4084 mut cx: AsyncAppContext,
4085 ) -> Result<()> {
4086 let version = deserialize_version(envelope.payload.version);
4087 let mtime = envelope
4088 .payload
4089 .mtime
4090 .ok_or_else(|| anyhow!("missing mtime"))?
4091 .into();
4092
4093 this.update(&mut cx, |this, cx| {
4094 let buffer = this
4095 .opened_buffers
4096 .get(&envelope.payload.buffer_id)
4097 .and_then(|buffer| buffer.upgrade(cx));
4098 if let Some(buffer) = buffer {
4099 buffer.update(cx, |buffer, cx| {
4100 buffer.did_save(version, mtime, None, cx);
4101 });
4102 }
4103 Ok(())
4104 })
4105 }
4106
4107 async fn handle_buffer_reloaded(
4108 this: ModelHandle<Self>,
4109 envelope: TypedEnvelope<proto::BufferReloaded>,
4110 _: Arc<Client>,
4111 mut cx: AsyncAppContext,
4112 ) -> Result<()> {
4113 let payload = envelope.payload.clone();
4114 let version = deserialize_version(payload.version);
4115 let mtime = payload
4116 .mtime
4117 .ok_or_else(|| anyhow!("missing mtime"))?
4118 .into();
4119 this.update(&mut cx, |this, cx| {
4120 let buffer = this
4121 .opened_buffers
4122 .get(&payload.buffer_id)
4123 .and_then(|buffer| buffer.upgrade(cx));
4124 if let Some(buffer) = buffer {
4125 buffer.update(cx, |buffer, cx| {
4126 buffer.did_reload(version, mtime, cx);
4127 });
4128 }
4129 Ok(())
4130 })
4131 }
4132
4133 pub fn match_paths<'a>(
4134 &self,
4135 query: &'a str,
4136 include_ignored: bool,
4137 smart_case: bool,
4138 max_results: usize,
4139 cancel_flag: &'a AtomicBool,
4140 cx: &AppContext,
4141 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4142 let worktrees = self
4143 .worktrees(cx)
4144 .filter(|worktree| worktree.read(cx).is_visible())
4145 .collect::<Vec<_>>();
4146 let include_root_name = worktrees.len() > 1;
4147 let candidate_sets = worktrees
4148 .into_iter()
4149 .map(|worktree| CandidateSet {
4150 snapshot: worktree.read(cx).snapshot(),
4151 include_ignored,
4152 include_root_name,
4153 })
4154 .collect::<Vec<_>>();
4155
4156 let background = cx.background().clone();
4157 async move {
4158 fuzzy::match_paths(
4159 candidate_sets.as_slice(),
4160 query,
4161 smart_case,
4162 max_results,
4163 cancel_flag,
4164 background,
4165 )
4166 .await
4167 }
4168 }
4169
    /// Converts LSP `TextEdit`s into anchor-range edits against the buffer
    /// snapshot that corresponds to `version` (or the current snapshot when
    /// `version` is `None`). Adjacent/newline-separated edits are merged, and
    /// multiline replacements are diffed line-by-line so anchors in unchanged
    /// regions keep their positions.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .peekable();

            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging if there is more than a bare newline
                        // between this edit and the next one.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates fall outside the snapshot;
                // clipping changing a point means it was out of bounds.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether an unchanged region separates this change
                    // from the previous one; consecutive delete/insert runs
                    // are folded into a single edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit's deleted range.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    // Append to the previous edit's new text.
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor after the position so the
                    // inserted text lands before any concurrent insertions.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
4272
4273 fn buffer_snapshot_for_lsp_version(
4274 &mut self,
4275 buffer: &ModelHandle<Buffer>,
4276 version: Option<i32>,
4277 cx: &AppContext,
4278 ) -> Result<TextBufferSnapshot> {
4279 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4280
4281 if let Some(version) = version {
4282 let buffer_id = buffer.read(cx).remote_id();
4283 let snapshots = self
4284 .buffer_snapshots
4285 .get_mut(&buffer_id)
4286 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4287 let mut found_snapshot = None;
4288 snapshots.retain(|(snapshot_version, snapshot)| {
4289 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4290 false
4291 } else {
4292 if *snapshot_version == version {
4293 found_snapshot = Some(snapshot.clone());
4294 }
4295 true
4296 }
4297 });
4298
4299 found_snapshot.ok_or_else(|| {
4300 anyhow!(
4301 "snapshot not found for buffer {} at version {}",
4302 buffer_id,
4303 version
4304 )
4305 })
4306 } else {
4307 Ok((buffer.read(cx)).text_snapshot())
4308 }
4309 }
4310
4311 fn language_server_for_buffer(
4312 &self,
4313 buffer: &Buffer,
4314 cx: &AppContext,
4315 ) -> Option<&Arc<LanguageServer>> {
4316 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4317 let worktree_id = file.worktree_id(cx);
4318 self.language_servers.get(&(worktree_id, language.name()))
4319 } else {
4320 None
4321 }
4322 }
4323}
4324
4325impl WorktreeHandle {
4326 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4327 match self {
4328 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4329 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4330 }
4331 }
4332}
4333
4334impl OpenBuffer {
4335 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4336 match self {
4337 OpenBuffer::Strong(handle) => Some(handle.clone()),
4338 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4339 OpenBuffer::Loading(_) => None,
4340 }
4341 }
4342}
4343
// Adapts a worktree snapshot to the fuzzy matcher's candidate-set interface.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether gitignored files participate in matching.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
4349
4350impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4351 type Candidates = CandidateSetIter<'a>;
4352
4353 fn id(&self) -> usize {
4354 self.snapshot.id().to_usize()
4355 }
4356
4357 fn len(&self) -> usize {
4358 if self.include_ignored {
4359 self.snapshot.file_count()
4360 } else {
4361 self.snapshot.visible_file_count()
4362 }
4363 }
4364
4365 fn prefix(&self) -> Arc<str> {
4366 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4367 self.snapshot.root_name().into()
4368 } else if self.include_root_name {
4369 format!("{}/", self.snapshot.root_name()).into()
4370 } else {
4371 "".into()
4372 }
4373 }
4374
4375 fn candidates(&'a self, start: usize) -> Self::Candidates {
4376 CandidateSetIter {
4377 traversal: self.snapshot.files(self.include_ignored, start),
4378 }
4379 }
4380}
4381
// Iterator over a worktree's file entries, yielding fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
4385
4386impl<'a> Iterator for CandidateSetIter<'a> {
4387 type Item = PathMatchCandidate<'a>;
4388
4389 fn next(&mut self) -> Option<Self::Item> {
4390 self.traversal.next().map(|entry| {
4391 if let EntryKind::File(char_bag) = entry.kind {
4392 PathMatchCandidate {
4393 path: &entry.path,
4394 char_bag,
4395 }
4396 } else {
4397 unreachable!()
4398 }
4399 })
4400 }
4401}
4402
impl Entity for Project {
    type Event = Event;

    // Called when the last handle to the project is dropped. Notifies the
    // collaboration server: a local (host) project is unregistered, while a
    // remote (guest) project is left. Send failures are logged, not raised.
    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared, i.e.
                // it has been assigned a remote id.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    // Gives every language server a chance to shut down cleanly before the
    // application exits. All shutdown futures are awaited concurrently; the
    // servers are drained so they cannot be used afterwards.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .filter_map(|(_, server)| server.shutdown())
            .collect::<Vec<_>>();
        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
4442
4443impl Collaborator {
4444 fn from_proto(
4445 message: proto::Collaborator,
4446 user_store: &ModelHandle<UserStore>,
4447 cx: &mut AsyncAppContext,
4448 ) -> impl Future<Output = Result<Self>> {
4449 let user = user_store.update(cx, |user_store, cx| {
4450 user_store.fetch_user(message.user_id, cx)
4451 });
4452
4453 async move {
4454 Ok(Self {
4455 peer_id: PeerId(message.peer_id),
4456 user: user.await?,
4457 replica_id: message.replica_id as ReplicaId,
4458 })
4459 }
4460 }
4461}
4462
4463impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4464 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4465 Self {
4466 worktree_id,
4467 path: path.as_ref().into(),
4468 }
4469 }
4470}
4471
4472impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4473 fn from(options: lsp::CreateFileOptions) -> Self {
4474 Self {
4475 overwrite: options.overwrite.unwrap_or(false),
4476 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4477 }
4478 }
4479}
4480
4481impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4482 fn from(options: lsp::RenameFileOptions) -> Self {
4483 Self {
4484 overwrite: options.overwrite.unwrap_or(false),
4485 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4486 }
4487 }
4488}
4489
4490impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4491 fn from(options: lsp::DeleteFileOptions) -> Self {
4492 Self {
4493 recursive: options.recursive.unwrap_or(false),
4494 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4495 }
4496 }
4497}
4498
// Converts a `Symbol` into its protobuf wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // SAFETY(review): relies on `symbol.kind` having exactly the same
        // size and bit layout as the proto field's integer type. If the LSP
        // kind type's layout ever changes this silently produces garbage —
        // prefer an explicit numeric conversion if one exists; TODO confirm.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
4518
/// Computes the relative path from `base` to `path` by walking both
/// component lists in lockstep: shared leading components are dropped, and
/// each remaining `base` component becomes a `..` in the result.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended verbatim.
            (Some(component), None) => {
                result.push(component);
                result.extend(target.by_ref());
                break;
            }
            // Path exhausted: each extra base component is one step up.
            (None, Some(_)) => result.push(Component::ParentDir),
            (Some(t), Some(a)) => {
                // Still in the shared prefix: skip matching components.
                if result.is_empty() && t == a {
                    continue;
                }
                if a == Component::CurDir {
                    // `.` in base consumes nothing; keep the path component.
                    result.push(t);
                } else {
                    // Paths diverged: back out of the remaining base
                    // components, then descend into the rest of `path`.
                    result.push(Component::ParentDir);
                    result.extend(anchor.by_ref().map(|_| Component::ParentDir));
                    result.push(t);
                    result.extend(target.by_ref());
                    break;
                }
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
4547
4548impl Item for Buffer {
4549 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4550 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4551 }
4552}
4553
4554#[cfg(test)]
4555mod tests {
4556 use super::{Event, *};
4557 use fs::RealFs;
4558 use futures::StreamExt;
4559 use gpui::test::subscribe;
4560 use language::{
4561 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4562 ToPoint,
4563 };
4564 use lsp::Url;
4565 use serde_json::json;
4566 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4567 use unindent::Unindent as _;
4568 use util::test::temp_tree;
4569 use worktree::WorktreeHandle as _;
4570
    // Scans a real on-disk tree reached through symlinks and checks both the
    // resulting file census and fuzzy path matching against it.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // The worktree root itself is a symlink, and "finnochio" is a
        // symlinked duplicate of "fennel" inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), cx);

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree(&root_link_path, true, cx)
            })
            .await
            .unwrap();

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;
        cx.read(|cx| {
            let tree = tree.read(cx);
            assert_eq!(tree.file_count(), 5);
            // The real path and the symlinked path resolve to the same inode.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy query "bna" should match only the files under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
4633
    // Verifies the lifecycle of per-language servers: lazy startup on the
    // first buffer open, capability-based buffer configuration, and routing
    // of open/change/save/close notifications to the right server(s).
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // Two fake servers with distinct completion triggers, so we can tell
        // which server configured which buffer.
        let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
        let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
        rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });
        json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![":".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(rust_lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));
        let json_language = Arc::new(Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                language_server: Some(json_lsp_config),
                ..Default::default()
            },
            None,
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            project.languages.add(rust_language);
            project.languages.add(json_language);
        });

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/the-root", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "Cargo.toml"), cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test.rs"), cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "package.json"), cx)
            })
            .await
            .unwrap();

        // Another language server is started up, and it is notified about
        // all three open buffers.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "test2.rs"), cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
4853
    // Verifies that nested progress tokens from the language server coalesce
    // into a single Started/Updated/Finished event sequence, and that
    // published diagnostics land in the buffer.
    #[gpui::test]
    async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        // The first progress-start emits a single Started event.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping start/end pairs must not emit duplicate events while
        // a token is still outstanding.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            },
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Ending the last outstanding token emits Updated then Finished.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is present in the newly-opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
4972
    // Verifies that diagnostics published against an older document version
    // are remapped through the edits made since that version, that
    // overlapping diagnostics highlight correctly, and that out-of-order
    // publishes are handled.
    #[gpui::test]
    async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        lsp_config
            .disk_based_diagnostic_sources
            .insert("disk".to_string());
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() { A }
            fn b() { BB }
            fn c() { CCC }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();
        let open_notification = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await;

        // Edit the buffer, moving the content down
        buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
        let change_notification_1 = fake_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await;
        assert!(
            change_notification_1.text_document.version > open_notification.text_document.version
        );

        // Report some diagnostics for the initial version of the buffer
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        source: Some("disk".to_string()),
                        message: "undefined variable 'CCC'".to_string(),
                        ..Default::default()
                    },
                ],
            },
        );

        // The diagnostics have moved down since they were created.
        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Point::new(4, 9)..Point::new(4, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'CCC'".to_string(),
                            is_disk_based: true,
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        }
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, 0..buffer.len()),
                [
                    ("\n\nfn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn b() { ".to_string(), None),
                    ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
                [
                    ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }\nfn c() { ".to_string(), None),
                    ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
                ]
            );
        });

        // Ensure overlapping diagnostics are highlighted correctly.
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "unreachable statement".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(2, 9)..Point::new(2, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
                [
                    ("fn a() { ".to_string(), None),
                    ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
            assert_eq!(
                chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
                [
                    (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                    ("\n".to_string(), None),
                ]
            );
        });

        // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
        // changes since the last save.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
            buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
        });
        let change_notification_2 =
            fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
        assert!(
            change_notification_2.await.text_document.version
                > change_notification_1.text_document.version
        );

        // Handle out-of-order diagnostics
        fake_server.notify::<lsp::notification::PublishDiagnostics>(
            lsp::PublishDiagnosticsParams {
                uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                version: Some(open_notification.text_document.version),
                diagnostics: vec![
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                        severity: Some(DiagnosticSeverity::ERROR),
                        message: "undefined variable 'BB'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                    lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "undefined variable 'A'".to_string(),
                        source: Some("disk".to_string()),
                        ..Default::default()
                    },
                ],
            },
        );

        buffer.next_notification(cx).await;
        buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                    .collect::<Vec<_>>(),
                &[
                    DiagnosticEntry {
                        range: Point::new(2, 21)..Point::new(2, 22),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        }
                    },
                    DiagnosticEntry {
                        range: Point::new(3, 9)..Point::new(3, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    }
                ]
            );
        });
    }
5265
    // Verifies how zero-width diagnostic ranges are widened so they remain
    // visible: forward to the next character, or backward at end-of-line.
    #[gpui::test]
    async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.rs": text })).await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Inject two zero-width diagnostics directly: one mid-line (before
        // the ";") and one at the end of a line.
        project.update(cx, |project, cx| {
            project
                .update_buffer_diagnostics(
                    &buffer,
                    vec![
                        DiagnosticEntry {
                            range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    None,
                    cx,
                )
                .unwrap();
        });

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        buffer.read_with(cx, |buffer, _| {
            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
            assert_eq!(
                chunks
                    .iter()
                    .map(|(s, d)| (s.as_str(), *d))
                    .collect::<Vec<_>>(),
                &[
                    ("let one = ", None),
                    (";", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet two =", None),
                    (" ", Some(DiagnosticSeverity::ERROR)),
                    ("\nlet three = 3;\n", None)
                ]
            );
        });
    }
5344
    // Edits computed by a language server against an older version of the
    // buffer must be transformed through the local edits made after that
    // version, so that they land in the intended locations.
    #[gpui::test]
    async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(lsp_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let text = "
            fn a() {
                f1();
            }
            fn b() {
                f2();
            }
            fn c() {
                f3();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| project.languages.add(language));

        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Capture the document version the fake server saw when the buffer
        // was opened; the LSP edits below will be tagged with this version.
        let mut fake_server = fake_servers.next().await.unwrap();
        let lsp_document_version = fake_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .version;

        // Simulate editing the buffer after the language server computes some edits.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [Point::new(0, 0)..Point::new(0, 0)],
                "// above first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(2, 0)..Point::new(2, 0)],
                "    // inside first function\n",
                cx,
            );
            buffer.edit(
                [Point::new(6, 4)..Point::new(6, 4)],
                "// inside second function ",
                cx,
            );

            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f1();
                    }
                    fn b() {
                        // inside second function f2();
                    }
                    fn c() {
                        f3();
                    }
                "
                .unindent()
            );
        });

        // Ask the project to interpret LSP edits expressed against the old
        // document version.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    vec![
                        // replace body of first function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 0),
                                lsp::Position::new(3, 0),
                            ),
                            new_text: "
                                fn a() {
                                    f10();
                                }
                            "
                            .unindent(),
                        },
                        // edit inside second function
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(4, 6),
                                lsp::Position::new(4, 6),
                            ),
                            new_text: "00".into(),
                        },
                        // edit inside third function via two distinct edits
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 5),
                            ),
                            new_text: "4000".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(7, 5),
                                lsp::Position::new(7, 6),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    Some(lsp_document_version),
                    cx,
                )
            })
            .await
            .unwrap();

        // Applying the translated edits yields the server's intended result
        // while preserving the concurrent local edits.
        buffer.update(cx, |buffer, cx| {
            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    // above first function
                    fn a() {
                        // inside first function
                        f10();
                    }
                    fn b() {
                        // inside second function f200();
                    }
                    fn c() {
                        f4000();
                    }
                "
                .unindent()
            );
        });
    }
5517
    // A language server may express a small change as a very large diff.
    // The resulting edits should be minimized down to the regions that
    // actually changed before they are applied to the buffer.
    #[gpui::test]
    async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let text = "
            use a::b;
            use a::c;

            fn f() {
                b();
                c();
            }
        "
        .unindent();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": text.clone(),
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        let worktree_id = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap()
            .0
            .read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "a.rs"), cx)
            })
            .await
            .unwrap();

        // Simulate the language server sending us a small edit in the form of a very large diff.
        // Rust-analyzer does this when performing a merge-imports code action.
        let edits = project
            .update(cx, |project, cx| {
                project.edits_from_lsp(
                    &buffer,
                    [
                        // Replace the first use statement without editing the semicolon.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 4),
                                lsp::Position::new(0, 8),
                            ),
                            new_text: "a::{b, c}".into(),
                        },
                        // Reinsert the remainder of the file between the semicolon and the final
                        // newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "\n\n".into(),
                        },
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(0, 9),
                                lsp::Position::new(0, 9),
                            ),
                            new_text: "
                                fn f() {
                                    b();
                                    c();
                                }"
                            .unindent(),
                        },
                        // Delete everything after the first newline of the file.
                        lsp::TextEdit {
                            range: lsp::Range::new(
                                lsp::Position::new(1, 0),
                                lsp::Position::new(7, 0),
                            ),
                            new_text: "".into(),
                        },
                    ],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();

        buffer.update(cx, |buffer, cx| {
            let edits = edits
                .into_iter()
                .map(|(range, text)| {
                    (
                        range.start.to_point(&buffer)..range.end.to_point(&buffer),
                        text,
                    )
                })
                .collect::<Vec<_>>();

            // Only the two regions that actually changed survive minimization.
            assert_eq!(
                edits,
                [
                    (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                    (Point::new(1, 0)..Point::new(2, 0), "".into())
                ]
            );

            for (range, new_text) in edits {
                buffer.edit([range], new_text, cx);
            }
            assert_eq!(
                buffer.text(),
                "
                    use a::{b, c};

                    fn f() {
                        b();
                        c();
                    }
                "
                .unindent()
            );
        });
    }
5647
5648 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5649 buffer: &Buffer,
5650 range: Range<T>,
5651 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5652 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5653 for chunk in buffer.snapshot().chunks(range, true) {
5654 if chunks.last().map_or(false, |prev_chunk| {
5655 prev_chunk.1 == chunk.diagnostic_severity
5656 }) {
5657 chunks.last_mut().unwrap().0.push_str(chunk.text);
5658 } else {
5659 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5660 }
5661 }
5662 chunks
5663 }
5664
5665 #[gpui::test]
5666 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5667 let dir = temp_tree(json!({
5668 "root": {
5669 "dir1": {},
5670 "dir2": {
5671 "dir3": {}
5672 }
5673 }
5674 }));
5675
5676 let project = Project::test(Arc::new(RealFs), cx);
5677 let (tree, _) = project
5678 .update(cx, |project, cx| {
5679 project.find_or_create_local_worktree(&dir.path(), true, cx)
5680 })
5681 .await
5682 .unwrap();
5683
5684 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5685 .await;
5686
5687 let cancel_flag = Default::default();
5688 let results = project
5689 .read_with(cx, |project, cx| {
5690 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5691 })
5692 .await;
5693
5694 assert!(results.is_empty());
5695 }
5696
    // Go-to-definition pointing outside the current worktree should open the
    // target file in a new, invisible worktree, which is released once the
    // returned location is dropped.
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        // Only b.rs is part of the project initially (single-file worktree).
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir/b.rs", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // The fake server reports the symbol as defined in a.rs — a file
        // outside the current worktree.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // a.rs was added as an additional, invisible worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // Dropping the definition releases the invisible worktree.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Lists each worktree's absolute path along with its visibility flag.
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
5810
5811 #[gpui::test]
5812 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5813 let fs = FakeFs::new(cx.background());
5814 fs.insert_tree(
5815 "/dir",
5816 json!({
5817 "file1": "the old contents",
5818 }),
5819 )
5820 .await;
5821
5822 let project = Project::test(fs.clone(), cx);
5823 let worktree_id = project
5824 .update(cx, |p, cx| {
5825 p.find_or_create_local_worktree("/dir", true, cx)
5826 })
5827 .await
5828 .unwrap()
5829 .0
5830 .read_with(cx, |tree, _| tree.id());
5831
5832 let buffer = project
5833 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5834 .await
5835 .unwrap();
5836 buffer
5837 .update(cx, |buffer, cx| {
5838 assert_eq!(buffer.text(), "the old contents");
5839 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5840 buffer.save(cx)
5841 })
5842 .await
5843 .unwrap();
5844
5845 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5846 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5847 }
5848
5849 #[gpui::test]
5850 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5851 let fs = FakeFs::new(cx.background());
5852 fs.insert_tree(
5853 "/dir",
5854 json!({
5855 "file1": "the old contents",
5856 }),
5857 )
5858 .await;
5859
5860 let project = Project::test(fs.clone(), cx);
5861 let worktree_id = project
5862 .update(cx, |p, cx| {
5863 p.find_or_create_local_worktree("/dir/file1", true, cx)
5864 })
5865 .await
5866 .unwrap()
5867 .0
5868 .read_with(cx, |tree, _| tree.id());
5869
5870 let buffer = project
5871 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5872 .await
5873 .unwrap();
5874 buffer
5875 .update(cx, |buffer, cx| {
5876 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5877 buffer.save(cx)
5878 })
5879 .await
5880 .unwrap();
5881
5882 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5883 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5884 }
5885
5886 #[gpui::test]
5887 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5888 let fs = FakeFs::new(cx.background());
5889 fs.insert_tree("/dir", json!({})).await;
5890
5891 let project = Project::test(fs.clone(), cx);
5892 let (worktree, _) = project
5893 .update(cx, |project, cx| {
5894 project.find_or_create_local_worktree("/dir", true, cx)
5895 })
5896 .await
5897 .unwrap();
5898 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5899
5900 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5901 buffer.update(cx, |buffer, cx| {
5902 buffer.edit([0..0], "abc", cx);
5903 assert!(buffer.is_dirty());
5904 assert!(!buffer.has_conflict());
5905 });
5906 project
5907 .update(cx, |project, cx| {
5908 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5909 })
5910 .await
5911 .unwrap();
5912 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5913 buffer.read_with(cx, |buffer, cx| {
5914 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5915 assert!(!buffer.is_dirty());
5916 assert!(!buffer.has_conflict());
5917 });
5918
5919 let opened_buffer = project
5920 .update(cx, |project, cx| {
5921 project.open_buffer((worktree_id, "file1"), cx)
5922 })
5923 .await
5924 .unwrap();
5925 assert_eq!(opened_buffer, buffer);
5926 }
5927
5928 #[gpui::test(retries = 5)]
5929 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5930 let dir = temp_tree(json!({
5931 "a": {
5932 "file1": "",
5933 "file2": "",
5934 "file3": "",
5935 },
5936 "b": {
5937 "c": {
5938 "file4": "",
5939 "file5": "",
5940 }
5941 }
5942 }));
5943
5944 let project = Project::test(Arc::new(RealFs), cx);
5945 let rpc = project.read_with(cx, |p, _| p.client.clone());
5946
5947 let (tree, _) = project
5948 .update(cx, |p, cx| {
5949 p.find_or_create_local_worktree(dir.path(), true, cx)
5950 })
5951 .await
5952 .unwrap();
5953 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5954
5955 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5956 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5957 async move { buffer.await.unwrap() }
5958 };
5959 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5960 tree.read_with(cx, |tree, _| {
5961 tree.entry_for_path(path)
5962 .expect(&format!("no entry for path {}", path))
5963 .id
5964 })
5965 };
5966
5967 let buffer2 = buffer_for_path("a/file2", cx).await;
5968 let buffer3 = buffer_for_path("a/file3", cx).await;
5969 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5970 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5971
5972 let file2_id = id_for_path("a/file2", &cx);
5973 let file3_id = id_for_path("a/file3", &cx);
5974 let file4_id = id_for_path("b/c/file4", &cx);
5975
5976 // Wait for the initial scan.
5977 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5978 .await;
5979
5980 // Create a remote copy of this worktree.
5981 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5982 let (remote, load_task) = cx.update(|cx| {
5983 Worktree::remote(
5984 1,
5985 1,
5986 initial_snapshot.to_proto(&Default::default(), true),
5987 rpc.clone(),
5988 cx,
5989 )
5990 });
5991 load_task.await;
5992
5993 cx.read(|cx| {
5994 assert!(!buffer2.read(cx).is_dirty());
5995 assert!(!buffer3.read(cx).is_dirty());
5996 assert!(!buffer4.read(cx).is_dirty());
5997 assert!(!buffer5.read(cx).is_dirty());
5998 });
5999
6000 // Rename and delete files and directories.
6001 tree.flush_fs_events(&cx).await;
6002 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6003 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6004 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6005 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6006 tree.flush_fs_events(&cx).await;
6007
6008 let expected_paths = vec![
6009 "a",
6010 "a/file1",
6011 "a/file2.new",
6012 "b",
6013 "d",
6014 "d/file3",
6015 "d/file4",
6016 ];
6017
6018 cx.read(|app| {
6019 assert_eq!(
6020 tree.read(app)
6021 .paths()
6022 .map(|p| p.to_str().unwrap())
6023 .collect::<Vec<_>>(),
6024 expected_paths
6025 );
6026
6027 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6028 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6029 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6030
6031 assert_eq!(
6032 buffer2.read(app).file().unwrap().path().as_ref(),
6033 Path::new("a/file2.new")
6034 );
6035 assert_eq!(
6036 buffer3.read(app).file().unwrap().path().as_ref(),
6037 Path::new("d/file3")
6038 );
6039 assert_eq!(
6040 buffer4.read(app).file().unwrap().path().as_ref(),
6041 Path::new("d/file4")
6042 );
6043 assert_eq!(
6044 buffer5.read(app).file().unwrap().path().as_ref(),
6045 Path::new("b/c/file5")
6046 );
6047
6048 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6049 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6050 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6051 assert!(buffer5.read(app).file().unwrap().is_deleted());
6052 });
6053
6054 // Update the remote worktree. Check that it becomes consistent with the
6055 // local worktree.
6056 remote.update(cx, |remote, cx| {
6057 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6058 &initial_snapshot,
6059 1,
6060 1,
6061 true,
6062 );
6063 remote
6064 .as_remote_mut()
6065 .unwrap()
6066 .snapshot
6067 .apply_remote_update(update_message)
6068 .unwrap();
6069
6070 assert_eq!(
6071 remote
6072 .paths()
6073 .map(|p| p.to_str().unwrap())
6074 .collect::<Vec<_>>(),
6075 expected_paths
6076 );
6077 });
6078 }
6079
6080 #[gpui::test]
6081 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6082 let fs = FakeFs::new(cx.background());
6083 fs.insert_tree(
6084 "/the-dir",
6085 json!({
6086 "a.txt": "a-contents",
6087 "b.txt": "b-contents",
6088 }),
6089 )
6090 .await;
6091
6092 let project = Project::test(fs.clone(), cx);
6093 let worktree_id = project
6094 .update(cx, |p, cx| {
6095 p.find_or_create_local_worktree("/the-dir", true, cx)
6096 })
6097 .await
6098 .unwrap()
6099 .0
6100 .read_with(cx, |tree, _| tree.id());
6101
6102 // Spawn multiple tasks to open paths, repeating some paths.
6103 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6104 (
6105 p.open_buffer((worktree_id, "a.txt"), cx),
6106 p.open_buffer((worktree_id, "b.txt"), cx),
6107 p.open_buffer((worktree_id, "a.txt"), cx),
6108 )
6109 });
6110
6111 let buffer_a_1 = buffer_a_1.await.unwrap();
6112 let buffer_a_2 = buffer_a_2.await.unwrap();
6113 let buffer_b = buffer_b.await.unwrap();
6114 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6115 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6116
6117 // There is only one buffer per path.
6118 let buffer_a_id = buffer_a_1.id();
6119 assert_eq!(buffer_a_2.id(), buffer_a_id);
6120
6121 // Open the same path again while it is still open.
6122 drop(buffer_a_1);
6123 let buffer_a_3 = project
6124 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6125 .await
6126 .unwrap();
6127
6128 // There's still only one buffer per path.
6129 assert_eq!(buffer_a_3.id(), buffer_a_id);
6130 }
6131
    // Exercises the buffer's dirty flag and the events it emits as it is
    // edited, saved, and as its underlying file is deleted from disk.
    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let dir = temp_tree(json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        worktree.flush_fs_events(&cx).await;
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
            .await
            .unwrap();
        // Record every event the buffer emits, except internal operations.
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit(vec![1..2], "", cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit(vec![1..1], "B", cx);
            buffer.edit(vec![2..2], "D", cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();

            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs::remove_file(dir.path().join("file2")).unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        worktree.flush_fs_events(&cx).await;
        // Dirty the buffer *before* the file disappears.
        buffer3.update(cx, |buffer, cx| {
            buffer.edit(Some(0..0), "x", cx);
        });
        events.borrow_mut().clear();
        fs::remove_file(dir.path().join("file3")).unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
6271
    // When the file changes on disk: an unmodified buffer silently reloads the
    // new contents; a modified buffer keeps its edits and is marked conflicted.
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
6369
6370 #[gpui::test]
6371 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6372 cx.foreground().forbid_parking();
6373
6374 let fs = FakeFs::new(cx.background());
6375 fs.insert_tree(
6376 "/the-dir",
6377 json!({
6378 "a.rs": "
6379 fn foo(mut v: Vec<usize>) {
6380 for x in &v {
6381 v.push(1);
6382 }
6383 }
6384 "
6385 .unindent(),
6386 }),
6387 )
6388 .await;
6389
6390 let project = Project::test(fs.clone(), cx);
6391 let (worktree, _) = project
6392 .update(cx, |p, cx| {
6393 p.find_or_create_local_worktree("/the-dir", true, cx)
6394 })
6395 .await
6396 .unwrap();
6397 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6398
6399 let buffer = project
6400 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6401 .await
6402 .unwrap();
6403
6404 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6405 let message = lsp::PublishDiagnosticsParams {
6406 uri: buffer_uri.clone(),
6407 diagnostics: vec![
6408 lsp::Diagnostic {
6409 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6410 severity: Some(DiagnosticSeverity::WARNING),
6411 message: "error 1".to_string(),
6412 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6413 location: lsp::Location {
6414 uri: buffer_uri.clone(),
6415 range: lsp::Range::new(
6416 lsp::Position::new(1, 8),
6417 lsp::Position::new(1, 9),
6418 ),
6419 },
6420 message: "error 1 hint 1".to_string(),
6421 }]),
6422 ..Default::default()
6423 },
6424 lsp::Diagnostic {
6425 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6426 severity: Some(DiagnosticSeverity::HINT),
6427 message: "error 1 hint 1".to_string(),
6428 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6429 location: lsp::Location {
6430 uri: buffer_uri.clone(),
6431 range: lsp::Range::new(
6432 lsp::Position::new(1, 8),
6433 lsp::Position::new(1, 9),
6434 ),
6435 },
6436 message: "original diagnostic".to_string(),
6437 }]),
6438 ..Default::default()
6439 },
6440 lsp::Diagnostic {
6441 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6442 severity: Some(DiagnosticSeverity::ERROR),
6443 message: "error 2".to_string(),
6444 related_information: Some(vec![
6445 lsp::DiagnosticRelatedInformation {
6446 location: lsp::Location {
6447 uri: buffer_uri.clone(),
6448 range: lsp::Range::new(
6449 lsp::Position::new(1, 13),
6450 lsp::Position::new(1, 15),
6451 ),
6452 },
6453 message: "error 2 hint 1".to_string(),
6454 },
6455 lsp::DiagnosticRelatedInformation {
6456 location: lsp::Location {
6457 uri: buffer_uri.clone(),
6458 range: lsp::Range::new(
6459 lsp::Position::new(1, 13),
6460 lsp::Position::new(1, 15),
6461 ),
6462 },
6463 message: "error 2 hint 2".to_string(),
6464 },
6465 ]),
6466 ..Default::default()
6467 },
6468 lsp::Diagnostic {
6469 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6470 severity: Some(DiagnosticSeverity::HINT),
6471 message: "error 2 hint 1".to_string(),
6472 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6473 location: lsp::Location {
6474 uri: buffer_uri.clone(),
6475 range: lsp::Range::new(
6476 lsp::Position::new(2, 8),
6477 lsp::Position::new(2, 17),
6478 ),
6479 },
6480 message: "original diagnostic".to_string(),
6481 }]),
6482 ..Default::default()
6483 },
6484 lsp::Diagnostic {
6485 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6486 severity: Some(DiagnosticSeverity::HINT),
6487 message: "error 2 hint 2".to_string(),
6488 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6489 location: lsp::Location {
6490 uri: buffer_uri.clone(),
6491 range: lsp::Range::new(
6492 lsp::Position::new(2, 8),
6493 lsp::Position::new(2, 17),
6494 ),
6495 },
6496 message: "original diagnostic".to_string(),
6497 }]),
6498 ..Default::default()
6499 },
6500 ],
6501 version: None,
6502 };
6503
6504 project
6505 .update(cx, |p, cx| {
6506 p.update_diagnostics(message, &Default::default(), cx)
6507 })
6508 .unwrap();
6509 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6510
6511 assert_eq!(
6512 buffer
6513 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6514 .collect::<Vec<_>>(),
6515 &[
6516 DiagnosticEntry {
6517 range: Point::new(1, 8)..Point::new(1, 9),
6518 diagnostic: Diagnostic {
6519 severity: DiagnosticSeverity::WARNING,
6520 message: "error 1".to_string(),
6521 group_id: 0,
6522 is_primary: true,
6523 ..Default::default()
6524 }
6525 },
6526 DiagnosticEntry {
6527 range: Point::new(1, 8)..Point::new(1, 9),
6528 diagnostic: Diagnostic {
6529 severity: DiagnosticSeverity::HINT,
6530 message: "error 1 hint 1".to_string(),
6531 group_id: 0,
6532 is_primary: false,
6533 ..Default::default()
6534 }
6535 },
6536 DiagnosticEntry {
6537 range: Point::new(1, 13)..Point::new(1, 15),
6538 diagnostic: Diagnostic {
6539 severity: DiagnosticSeverity::HINT,
6540 message: "error 2 hint 1".to_string(),
6541 group_id: 1,
6542 is_primary: false,
6543 ..Default::default()
6544 }
6545 },
6546 DiagnosticEntry {
6547 range: Point::new(1, 13)..Point::new(1, 15),
6548 diagnostic: Diagnostic {
6549 severity: DiagnosticSeverity::HINT,
6550 message: "error 2 hint 2".to_string(),
6551 group_id: 1,
6552 is_primary: false,
6553 ..Default::default()
6554 }
6555 },
6556 DiagnosticEntry {
6557 range: Point::new(2, 8)..Point::new(2, 17),
6558 diagnostic: Diagnostic {
6559 severity: DiagnosticSeverity::ERROR,
6560 message: "error 2".to_string(),
6561 group_id: 1,
6562 is_primary: true,
6563 ..Default::default()
6564 }
6565 }
6566 ]
6567 );
6568
6569 assert_eq!(
6570 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6571 &[
6572 DiagnosticEntry {
6573 range: Point::new(1, 8)..Point::new(1, 9),
6574 diagnostic: Diagnostic {
6575 severity: DiagnosticSeverity::WARNING,
6576 message: "error 1".to_string(),
6577 group_id: 0,
6578 is_primary: true,
6579 ..Default::default()
6580 }
6581 },
6582 DiagnosticEntry {
6583 range: Point::new(1, 8)..Point::new(1, 9),
6584 diagnostic: Diagnostic {
6585 severity: DiagnosticSeverity::HINT,
6586 message: "error 1 hint 1".to_string(),
6587 group_id: 0,
6588 is_primary: false,
6589 ..Default::default()
6590 }
6591 },
6592 ]
6593 );
6594 assert_eq!(
6595 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6596 &[
6597 DiagnosticEntry {
6598 range: Point::new(1, 13)..Point::new(1, 15),
6599 diagnostic: Diagnostic {
6600 severity: DiagnosticSeverity::HINT,
6601 message: "error 2 hint 1".to_string(),
6602 group_id: 1,
6603 is_primary: false,
6604 ..Default::default()
6605 }
6606 },
6607 DiagnosticEntry {
6608 range: Point::new(1, 13)..Point::new(1, 15),
6609 diagnostic: Diagnostic {
6610 severity: DiagnosticSeverity::HINT,
6611 message: "error 2 hint 2".to_string(),
6612 group_id: 1,
6613 is_primary: false,
6614 ..Default::default()
6615 }
6616 },
6617 DiagnosticEntry {
6618 range: Point::new(2, 8)..Point::new(2, 17),
6619 diagnostic: Diagnostic {
6620 severity: DiagnosticSeverity::ERROR,
6621 message: "error 2".to_string(),
6622 group_id: 1,
6623 is_primary: true,
6624 ..Default::default()
6625 }
6626 }
6627 ]
6628 );
6629 }
6630
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // A "Rust" language wired to a fake language server, so this test can
        // intercept and answer the LSP requests that rename produces.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        // Two files: the rename target (`ONE` in one.rs) and a file that
        // references it twice (two.rs).
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        // Wait for the initial worktree scan before opening buffers.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        // Opening the buffer starts the fake language server; grab its handle.
        let mut fake_server = fake_servers.next().await.unwrap();

        // Kick off prepare_rename at offset 7 (inside `ONE`), then answer the
        // resulting LSP request from the fake server. Note the response future
        // is created first and only awaited after the handler has served the
        // request — the two sides of the round trip are driven in this order.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        // The server's (0, 6)..(0, 9) range maps to buffer offsets 6..9, i.e. `ONE`.
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the rename; the fake server replies with a WorkspaceEdit that
        // touches both one.rs (the definition) and two.rs (both references).
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The rename yields one transaction entry per modified buffer (one.rs
        // and two.rs), each holding the edited buffer.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        // The remaining entry is two.rs, with both references renamed.
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
6774
6775 #[gpui::test]
6776 async fn test_search(cx: &mut gpui::TestAppContext) {
6777 let fs = FakeFs::new(cx.background());
6778 fs.insert_tree(
6779 "/dir",
6780 json!({
6781 "one.rs": "const ONE: usize = 1;",
6782 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6783 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6784 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6785 }),
6786 )
6787 .await;
6788 let project = Project::test(fs.clone(), cx);
6789 let (tree, _) = project
6790 .update(cx, |project, cx| {
6791 project.find_or_create_local_worktree("/dir", true, cx)
6792 })
6793 .await
6794 .unwrap();
6795 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6796 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6797 .await;
6798
6799 assert_eq!(
6800 search(&project, SearchQuery::text("TWO", false, true), cx)
6801 .await
6802 .unwrap(),
6803 HashMap::from_iter([
6804 ("two.rs".to_string(), vec![6..9]),
6805 ("three.rs".to_string(), vec![37..40])
6806 ])
6807 );
6808
6809 let buffer_4 = project
6810 .update(cx, |project, cx| {
6811 project.open_buffer((worktree_id, "four.rs"), cx)
6812 })
6813 .await
6814 .unwrap();
6815 buffer_4.update(cx, |buffer, cx| {
6816 buffer.edit([20..28, 31..43], "two::TWO", cx);
6817 });
6818
6819 assert_eq!(
6820 search(&project, SearchQuery::text("TWO", false, true), cx)
6821 .await
6822 .unwrap(),
6823 HashMap::from_iter([
6824 ("two.rs".to_string(), vec![6..9]),
6825 ("three.rs".to_string(), vec![37..40]),
6826 ("four.rs".to_string(), vec![25..28, 36..39])
6827 ])
6828 );
6829
6830 async fn search(
6831 project: &ModelHandle<Project>,
6832 query: SearchQuery,
6833 cx: &mut gpui::TestAppContext,
6834 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6835 let results = project
6836 .update(cx, |project, cx| project.search(query, cx))
6837 .await?;
6838
6839 Ok(results
6840 .into_iter()
6841 .map(|(buffer, ranges)| {
6842 buffer.read_with(cx, |buffer, _| {
6843 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6844 let ranges = ranges
6845 .into_iter()
6846 .map(|range| range.to_offset(buffer))
6847 .collect::<Vec<_>>();
6848 (path, ranges)
6849 })
6850 })
6851 .collect())
6852 }
6853 }
6854}