pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
    DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
    LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
    ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

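/// State for a single open project: its worktrees, open buffers, language
/// servers, and collaboration state (local host or remote guest).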
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

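/// An open buffer: held strongly while the project is shared or remote, weakly
/// otherwise, or still loading with operations queued to apply once it arrives.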
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

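/// Distinguishes a project hosted locally (and optionally shared with
/// collaborators) from one joined remotely as a guest.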
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

enum LanguageServerEvent {
    WorkStart {
        token: String,
    },
    WorkProgress {
        token: String,
        progress: LanguageServerProgress,
    },
    WorkEnd {
        token: String,
    },
    DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
}

pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_start_language_server);
        client.add_entity_message_handler(Self::handle_update_language_server);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if status.is_connected() {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.project_unshared(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(buffer) => {
                                *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                            }
                            _ => {}
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(worktree) => {
                                if !worktree.read(cx).is_visible() {
                                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                                }
                            }
                            _ => {}
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't unshare a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: Arc<str>,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

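    /// Starts tracking an open buffer: applies any operations that arrived while
    /// it was loading, subscribes to its events, and connects it to a language server.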
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);

        Ok(())
    }

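    /// For a local buffer: reports any known diagnostics, sends `textDocument/didOpen`
    /// to the language server, records the snapshot the server was given, and sends
    /// `textDocument/didClose` when the buffer is released.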
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();
                let language_server = self.language_server_for_buffer(buffer, cx).cloned();

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some(server) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    Default::default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }

                cx.observe_release(buffer_handle, |this, buffer, cx| {
                    if let Some(file) = File::from_dyn(buffer.file()) {
                        if file.is_local() {
                            let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                            if let Some(server) = this.language_server_for_buffer(buffer, cx) {
                                server
                                    .notify::<lsp::notification::DidCloseTextDocument>(
                                        lsp::DidCloseTextDocumentParams {
                                            text_document: lsp::TextDocumentIdentifier::new(
                                                uri.clone(),
                                            ),
                                        },
                                    )
                                    .log_err();
                            }
                        }
                    }
                })
                .detach();
            }
        }
    }

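    /// Forwards buffer operations to collaborators and keeps the language server
    /// in sync via `didChange` and `didSave` notifications.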
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                let project_id = self.remote_id()?;
                let request = self.client.request(proto::UpdateBuffer {
                    project_id,
                    buffer_id: buffer.read(cx).remote_id(),
                    operations: vec![language::proto::serialize_operation(&operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            }
            BufferEvent::Edited => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)?
                    .clone();
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                edit_start.to_lsp_position(),
                                edit_end.to_lsp_position(),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
        self.language_servers.iter().filter_map(
            move |((language_server_worktree_id, language_name), server)| {
                if *language_server_worktree_id == worktree_id {
                    Some((language_name.as_ref(), server))
                } else {
                    None
                }
            },
        )
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }

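    /// Starts the language server for a worktree/language pair at most once,
    /// wiring up its diagnostics, progress, and configuration callbacks, then
    /// opening every matching buffer with it.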
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let key = (worktree_id, language.name());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let server_id = post_inc(&mut self.next_language_server_id);
                let language_server = self.languages.start_language_server(
                    language.clone(),
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                cx.spawn_weak(|this, mut cx| async move {
                    let mut language_server = language_server?.await.log_err()?;
                    let this = this.upgrade(&cx)?;
                    let (language_server_events_tx, language_server_events_rx) =
                        smol::channel::unbounded();

                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let language_server_events_tx = language_server_events_tx.clone();
                            move |params| {
                                language_server_events_tx
                                    .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
                                    .ok();
                            }
                        })
                        .detach();

                    language_server
                        .on_request::<lsp::request::WorkspaceConfiguration, _>({
                            let settings = this
                                .read_with(&cx, |this, _| this.language_server_settings.clone());
                            move |params| {
                                let settings = settings.lock();
                                Ok(params
                                    .items
                                    .into_iter()
                                    .map(|item| {
                                        if let Some(section) = &item.section {
                                            settings
                                                .get(section)
                                                .cloned()
                                                .unwrap_or(serde_json::Value::Null)
                                        } else {
                                            settings.clone()
                                        }
                                    })
                                    .collect())
                            }
                        })
                        .detach();

                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::String(token) => token,
                                lsp::NumberOrString::Number(token) => {
                                    log::info!("skipping numeric progress token {}", token);
                                    return;
                                }
                            };

                            match params.value {
                                lsp::ProgressParamsValue::WorkDone(progress) => match progress {
                                    lsp::WorkDoneProgress::Begin(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkStart { token })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::Report(report) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkProgress {
                                                token,
                                                progress: LanguageServerProgress {
                                                    message: report.message,
                                                    percentage: report
                                                        .percentage
                                                        .map(|p| p as usize),
                                                    last_update_at: Instant::now(),
                                                },
                                            })
                                            .ok();
                                    }
                                    lsp::WorkDoneProgress::End(_) => {
                                        language_server_events_tx
                                            .try_send(LanguageServerEvent::WorkEnd { token })
                                            .ok();
                                    }
                                },
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| {
                        let this = this.downgrade();
                        async move {
                            while let Ok(event) = language_server_events_rx.recv().await {
                                let this = this.upgrade(&cx)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_event(server_id, event, &language, cx)
                                });

                                // Don't starve the main thread when lots of events arrive all at once.
                                smol::future::yield_now().await;
                            }
                            Some(())
                        }
                    })
                    .detach();

                    let language_server = language_server.initialize().await.log_err()?;
                    this.update(&mut cx, |this, cx| {
                        this.language_servers
                            .insert(key.clone(), language_server.clone());
                        this.language_server_statuses.insert(
                            server_id,
                            LanguageServerStatus {
                                name: language_server.name().to_string(),
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        );
                        language_server
                            .notify::<lsp::notification::DidChangeConfiguration>(
                                lsp::DidChangeConfigurationParams {
                                    settings: this.language_server_settings.lock().clone(),
                                },
                            )
                            .ok();

                        if let Some(project_id) = this.remote_id() {
                            this.client
                                .send(proto::StartLanguageServer {
                                    project_id,
                                    server: Some(proto::LanguageServer {
                                        id: server_id as u64,
                                        name: language_server.name().to_string(),
                                    }),
                                })
                                .log_err();
                        }

                        // Tell the language server about every open buffer in the worktree that matches the language.
                        for buffer in this.opened_buffers.values() {
                            if let Some(buffer_handle) = buffer.upgrade(cx) {
                                let buffer = buffer_handle.read(cx);
                                let file = if let Some(file) = File::from_dyn(buffer.file()) {
                                    file
                                } else {
                                    continue;
                                };
                                let language = if let Some(language) = buffer.language() {
                                    language
                                } else {
                                    continue;
                                };
                                if (file.worktree.read(cx).id(), language.name()) != key {
                                    continue;
                                }

                                let file = file.as_local()?;
                                let versions = this
                                    .buffer_snapshots
                                    .entry(buffer.remote_id())
                                    .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
                                let (version, initial_snapshot) = versions.last().unwrap();
                                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                                language_server
                                    .notify::<lsp::notification::DidOpenTextDocument>(
                                        lsp::DidOpenTextDocumentParams {
                                            text_document: lsp::TextDocumentItem::new(
                                                uri,
                                                Default::default(),
                                                *version,
                                                initial_snapshot.text(),
                                            ),
                                        },
                                    )
                                    .log_err()?;
                                buffer_handle.update(cx, |buffer, cx| {
                                    buffer.set_completion_triggers(
                                        language_server
                                            .capabilities()
                                            .completion_provider
                                            .as_ref()
                                            .and_then(|provider| {
                                                provider.trigger_characters.clone()
                                            })
                                            .unwrap_or(Vec::new()),
                                        cx,
                                    )
                                });
                            }
                        }

                        cx.notify();
                        Some(())
                    });

                    Some(language_server)
                })
            });
    }

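    /// Routes language server events: disk-based diagnostic progress toggles the
    /// project-wide "diagnostics running" state, other work is tracked per token,
    /// and published diagnostics are merged into buffers and worktrees.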
    fn on_lsp_event(
        &mut self,
        language_server_id: usize,
        event: LanguageServerEvent,
        language: &Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let disk_diagnostics_token = language.disk_based_diagnostics_progress_token();
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                return;
            };

        match event {
            LanguageServerEvent::WorkStart { token } => {
                if Some(&token) == disk_diagnostics_token {
                    language_server_status.pending_diagnostic_updates += 1;
                    if language_server_status.pending_diagnostic_updates == 1 {
                        self.disk_based_diagnostics_started(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                                proto::LspDiskBasedDiagnosticsUpdating {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_start(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::WorkProgress { token, progress } => {
                if Some(&token) != disk_diagnostics_token {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        progress.clone(),
                        cx,
                    );
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: progress.message,
                                percentage: progress.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            LanguageServerEvent::WorkEnd { token } => {
                if Some(&token) == disk_diagnostics_token {
                    language_server_status.pending_diagnostic_updates -= 1;
                    if language_server_status.pending_diagnostic_updates == 0 {
                        self.disk_based_diagnostics_finished(cx);
                        self.broadcast_language_server_update(
                            language_server_id,
                            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                proto::LspDiskBasedDiagnosticsUpdated {},
                            ),
                        );
                    }
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
            LanguageServerEvent::DiagnosticsUpdate(mut params) => {
                language.process_diagnostics(&mut params);

                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_started(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                }
                self.update_diagnostics(
                    params,
                    language
                        .disk_based_diagnostic_sources()
                        .unwrap_or(&Default::default()),
                    cx,
                )
                .log_err();
                if disk_diagnostics_token.is_none() {
                    self.disk_based_diagnostics_finished(cx);
                    self.broadcast_language_server_update(
                        language_server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
        }
    }

    fn on_lsp_work_start(
        &mut self,
        language_server_id: usize,
        token: String,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.insert(
                token,
                LanguageServerProgress {
                    message: None,
                    percentage: None,
                    last_update_at: Instant::now(),
                },
            );
            cx.notify();
        }
    }

    fn on_lsp_work_progress(
        &mut self,
        language_server_id: usize,
        token: String,
        progress: LanguageServerProgress,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.insert(token, progress);
            cx.notify();
        }
    }

    fn on_lsp_work_end(
        &mut self,
        language_server_id: usize,
        token: String,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
            status.pending_work.remove(&token);
            cx.notify();
        }
    }

    fn broadcast_language_server_update(
        &self,
        language_server_id: usize,
        event: proto::update_language_server::Variant,
    ) {
        if let Some(project_id) = self.remote_id() {
            self.client
                .send(proto::UpdateLanguageServer {
                    project_id,
                    language_server_id: language_server_id as u64,
                    variant: Some(event),
                })
                .log_err();
        }
    }

    pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
        for server in self.language_servers.values() {
            server
                .notify::<lsp::notification::DidChangeConfiguration>(
                    lsp::DidChangeConfigurationParams {
                        settings: settings.clone(),
                    },
                )
                .ok();
        }
        *self.language_server_settings.lock() = settings;
    }

    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }

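    /// Translates an LSP `publishDiagnostics` payload into grouped diagnostic
    /// entries, treating related-information items as supporting diagnostics of
    /// their primary entry.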
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
                tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
            });

            if is_supporting {
                supporting_diagnostics.insert(
                    (source, code.clone(), range),
                    (diagnostic.severity, is_unnecessary),
                );
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                        is_unnecessary,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                    is_unnecessary: false,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    if let Some(severity) = severity {
                        diagnostic.severity = severity;
                    }
                    diagnostic.is_unnecessary = is_unnecessary;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }

    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }

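    /// Applies diagnostics to an open buffer, shifting the ranges of disk-based
    /// diagnostics across any unsaved edits and clipping them to valid positions.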
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let mut edits_since_save = snapshot
            .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        'outer: for entry in diagnostics {
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry.diagnostic.is_disk_based {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                let start_overshoot = start - last_edit_old_end;
                start = last_edit_new_end;
                start += start_overshoot;

                let end_overshoot = end - last_edit_old_end;
                end = last_edit_new_end;
                end += end_overshoot;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }

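    /// Formats the given buffers: remote buffers via a request to the host, local
    /// buffers via their language server's document or range formatting, collecting
    /// all resulting edits into a single `ProjectTransaction`.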
1845 pub fn format(
1846 &self,
1847 buffers: HashSet<ModelHandle<Buffer>>,
1848 push_to_history: bool,
1849 cx: &mut ModelContext<Project>,
1850 ) -> Task<Result<ProjectTransaction>> {
1851 let mut local_buffers = Vec::new();
1852 let mut remote_buffers = None;
1853 for buffer_handle in buffers {
1854 let buffer = buffer_handle.read(cx);
1855 if let Some(file) = File::from_dyn(buffer.file()) {
1856 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1857 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1858 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1859 }
1860 } else {
1861 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1862 }
1863 } else {
1864 return Task::ready(Ok(Default::default()));
1865 }
1866 }
1867
1868 let remote_buffers = self.remote_id().zip(remote_buffers);
1869 let client = self.client.clone();
1870
1871 cx.spawn(|this, mut cx| async move {
1872 let mut project_transaction = ProjectTransaction::default();
1873
1874 if let Some((project_id, remote_buffers)) = remote_buffers {
1875 let response = client
1876 .request(proto::FormatBuffers {
1877 project_id,
1878 buffer_ids: remote_buffers
1879 .iter()
1880 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1881 .collect(),
1882 })
1883 .await?
1884 .transaction
1885 .ok_or_else(|| anyhow!("missing transaction"))?;
1886 project_transaction = this
1887 .update(&mut cx, |this, cx| {
1888 this.deserialize_project_transaction(response, push_to_history, cx)
1889 })
1890 .await?;
1891 }
1892
1893 for (buffer, buffer_abs_path, language_server) in local_buffers {
1894 let text_document = lsp::TextDocumentIdentifier::new(
1895 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1896 );
1897 let capabilities = &language_server.capabilities();
1898 let lsp_edits = if capabilities
1899 .document_formatting_provider
1900 .as_ref()
1901 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1902 {
1903 language_server
1904 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1905 text_document,
1906 options: Default::default(),
1907 work_done_progress_params: Default::default(),
1908 })
1909 .await?
1910 } else if capabilities
1911 .document_range_formatting_provider
1912 .as_ref()
1913 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1914 {
1915 let buffer_start = lsp::Position::new(0, 0);
1916 let buffer_end = buffer
1917 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1918 .to_lsp_position();
1919 language_server
1920 .request::<lsp::request::RangeFormatting>(
1921 lsp::DocumentRangeFormattingParams {
1922 text_document,
1923 range: lsp::Range::new(buffer_start, buffer_end),
1924 options: Default::default(),
1925 work_done_progress_params: Default::default(),
1926 },
1927 )
1928 .await?
1929 } else {
1930 continue;
1931 };
1932
1933 if let Some(lsp_edits) = lsp_edits {
1934 let edits = this
1935 .update(&mut cx, |this, cx| {
1936 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1937 })
1938 .await?;
1939 buffer.update(&mut cx, |buffer, cx| {
1940 buffer.finalize_last_transaction();
1941 buffer.start_transaction();
1942 for (range, text) in edits {
1943 buffer.edit([range], text, cx);
1944 }
1945 if buffer.end_transaction(cx).is_some() {
1946 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1947 if !push_to_history {
1948 buffer.forget_transaction(transaction.id);
1949 }
1950 project_transaction.0.insert(cx.handle(), transaction);
1951 }
1952 });
1953 }
1954 }
1955
1956 Ok(project_transaction)
1957 })
1958 }
1959
1960 pub fn definition<T: ToPointUtf16>(
1961 &self,
1962 buffer: &ModelHandle<Buffer>,
1963 position: T,
1964 cx: &mut ModelContext<Self>,
1965 ) -> Task<Result<Vec<Location>>> {
1966 let position = position.to_point_utf16(buffer.read(cx));
1967 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1968 }
1969
1970 pub fn references<T: ToPointUtf16>(
1971 &self,
1972 buffer: &ModelHandle<Buffer>,
1973 position: T,
1974 cx: &mut ModelContext<Self>,
1975 ) -> Task<Result<Vec<Location>>> {
1976 let position = position.to_point_utf16(buffer.read(cx));
1977 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1978 }
1979
1980 pub fn document_highlights<T: ToPointUtf16>(
1981 &self,
1982 buffer: &ModelHandle<Buffer>,
1983 position: T,
1984 cx: &mut ModelContext<Self>,
1985 ) -> Task<Result<Vec<DocumentHighlight>>> {
1986 let position = position.to_point_utf16(buffer.read(cx));
1987
1988 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1989 }
1990
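    /// Search for workspace symbols matching `query`.
    ///
    /// For local projects this fans a `workspace/symbol` request out to every running
    /// language server and resolves each result to a project path, falling back to a
    /// path relative to the server's worktree when the file isn't part of the project.
    /// For remote projects the query is forwarded to the host.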
1991 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1992 if self.is_local() {
1993 let mut language_servers = HashMap::default();
1994 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1995 if let Some((worktree, language)) = self
1996 .worktree_for_id(*worktree_id, cx)
1997 .and_then(|worktree| worktree.read(cx).as_local())
1998 .zip(self.languages.get_language(language_name))
1999 {
2000 language_servers
2001 .entry(Arc::as_ptr(language_server))
2002 .or_insert((
2003 language_server.clone(),
2004 *worktree_id,
2005 worktree.abs_path().clone(),
2006 language.clone(),
2007 ));
2008 }
2009 }
2010
2011 let mut requests = Vec::new();
2012 for (language_server, _, _, _) in language_servers.values() {
2013 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2014 lsp::WorkspaceSymbolParams {
2015 query: query.to_string(),
2016 ..Default::default()
2017 },
2018 ));
2019 }
2020
2021 cx.spawn_weak(|this, cx| async move {
2022 let responses = futures::future::try_join_all(requests).await?;
2023
2024 let mut symbols = Vec::new();
2025 if let Some(this) = this.upgrade(&cx) {
2026 this.read_with(&cx, |this, cx| {
2027 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
2028 language_servers.into_values().zip(responses)
2029 {
2030 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2031 |lsp_symbol| {
2032 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2033 let mut worktree_id = source_worktree_id;
2034 let path;
2035 if let Some((worktree, rel_path)) =
2036 this.find_local_worktree(&abs_path, cx)
2037 {
2038 worktree_id = worktree.read(cx).id();
2039 path = rel_path;
2040 } else {
2041 path = relativize_path(&worktree_abs_path, &abs_path);
2042 }
2043
2044 let label = language
2045 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2046 .unwrap_or_else(|| {
2047 CodeLabel::plain(lsp_symbol.name.clone(), None)
2048 });
2049 let signature = this.symbol_signature(worktree_id, &path);
2050
2051 Some(Symbol {
2052 source_worktree_id,
2053 worktree_id,
2054 language_name: language.name().to_string(),
2055 name: lsp_symbol.name,
2056 kind: lsp_symbol.kind,
2057 label,
2058 path,
2059 range: range_from_lsp(lsp_symbol.location.range),
2060 signature,
2061 })
2062 },
2063 ));
2064 }
2065 })
2066 }
2067
2068 Ok(symbols)
2069 })
2070 } else if let Some(project_id) = self.remote_id() {
2071 let request = self.client.request(proto::GetProjectSymbols {
2072 project_id,
2073 query: query.to_string(),
2074 });
2075 cx.spawn_weak(|this, cx| async move {
2076 let response = request.await?;
2077 let mut symbols = Vec::new();
2078 if let Some(this) = this.upgrade(&cx) {
2079 this.read_with(&cx, |this, _| {
2080 symbols.extend(
2081 response
2082 .symbols
2083 .into_iter()
2084 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2085 );
2086 })
2087 }
2088 Ok(symbols)
2089 })
2090 } else {
2091 Task::ready(Ok(Default::default()))
2092 }
2093 }
2094
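    /// Open the buffer containing the given symbol.
    ///
    /// Locally, the symbol's path is resolved against its worktree and opened through
    /// the language server that produced it; remotely, the symbol is sent back to the
    /// host, which re-validates its signature before opening the buffer.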
2095 pub fn open_buffer_for_symbol(
2096 &mut self,
2097 symbol: &Symbol,
2098 cx: &mut ModelContext<Self>,
2099 ) -> Task<Result<ModelHandle<Buffer>>> {
2100 if self.is_local() {
2101 let language_server = if let Some(server) = self.language_servers.get(&(
2102 symbol.source_worktree_id,
2103 Arc::from(symbol.language_name.as_str()),
2104 )) {
2105 server.clone()
2106 } else {
2107 return Task::ready(Err(anyhow!(
2108 "language server for worktree and language not found"
2109 )));
2110 };
2111
2112 let worktree_abs_path = if let Some(worktree_abs_path) = self
2113 .worktree_for_id(symbol.worktree_id, cx)
2114 .and_then(|worktree| worktree.read(cx).as_local())
2115 .map(|local_worktree| local_worktree.abs_path())
2116 {
2117 worktree_abs_path
2118 } else {
2119 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2120 };
2121 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2122 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2123 uri
2124 } else {
2125 return Task::ready(Err(anyhow!("invalid symbol path")));
2126 };
2127
2128 self.open_local_buffer_via_lsp(
2129 symbol_uri,
2130 Arc::from(symbol.language_name.as_str()),
2131 language_server,
2132 cx,
2133 )
2134 } else if let Some(project_id) = self.remote_id() {
2135 let request = self.client.request(proto::OpenBufferForSymbol {
2136 project_id,
2137 symbol: Some(serialize_symbol(symbol)),
2138 });
2139 cx.spawn(|this, mut cx| async move {
2140 let response = request.await?;
2141 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2142 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2143 .await
2144 })
2145 } else {
2146 Task::ready(Err(anyhow!("project does not have a remote id")))
2147 }
2148 }
2149
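    /// Request completions at the given position.
    ///
    /// Local buffers issue an LSP `textDocument/completion` request and convert each
    /// item's text edit into an anchored [`Completion`], skipping items that have no
    /// text edit, use insert-and-replace edits, or whose edit range is invalid in the
    /// current buffer. Remote buffers send a `GetCompletions` RPC and wait for the
    /// buffer to catch up to the version the host responded against.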
2150 pub fn completions<T: ToPointUtf16>(
2151 &self,
2152 source_buffer_handle: &ModelHandle<Buffer>,
2153 position: T,
2154 cx: &mut ModelContext<Self>,
2155 ) -> Task<Result<Vec<Completion>>> {
2156 let source_buffer_handle = source_buffer_handle.clone();
2157 let source_buffer = source_buffer_handle.read(cx);
2158 let buffer_id = source_buffer.remote_id();
2159 let language = source_buffer.language().cloned();
2160 let worktree;
2161 let buffer_abs_path;
2162 if let Some(file) = File::from_dyn(source_buffer.file()) {
2163 worktree = file.worktree.clone();
2164 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2165 } else {
2166 return Task::ready(Ok(Default::default()));
2167 };
2168
2169 let position = position.to_point_utf16(source_buffer);
2170 let anchor = source_buffer.anchor_after(position);
2171
2172 if worktree.read(cx).as_local().is_some() {
2173 let buffer_abs_path = buffer_abs_path.unwrap();
2174 let lang_server =
2175 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2176 server.clone()
2177 } else {
2178 return Task::ready(Ok(Default::default()));
2179 };
2180
2181 cx.spawn(|_, cx| async move {
2182 let completions = lang_server
2183 .request::<lsp::request::Completion>(lsp::CompletionParams {
2184 text_document_position: lsp::TextDocumentPositionParams::new(
2185 lsp::TextDocumentIdentifier::new(
2186 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2187 ),
2188 position.to_lsp_position(),
2189 ),
2190 context: Default::default(),
2191 work_done_progress_params: Default::default(),
2192 partial_result_params: Default::default(),
2193 })
2194 .await
2195 .context("lsp completion request failed")?;
2196
2197 let completions = if let Some(completions) = completions {
2198 match completions {
2199 lsp::CompletionResponse::Array(completions) => completions,
2200 lsp::CompletionResponse::List(list) => list.items,
2201 }
2202 } else {
2203 Default::default()
2204 };
2205
2206 source_buffer_handle.read_with(&cx, |this, _| {
2207 Ok(completions
2208 .into_iter()
2209 .filter_map(|lsp_completion| {
2210 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
2211 lsp::CompletionTextEdit::Edit(edit) => {
2212 (range_from_lsp(edit.range), edit.new_text.clone())
2213 }
2214 lsp::CompletionTextEdit::InsertAndReplace(_) => {
2215 log::info!("unsupported insert/replace completion");
2216 return None;
2217 }
2218 };
2219
2220 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2221 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2222 if clipped_start == old_range.start && clipped_end == old_range.end {
2223 Some(Completion {
2224 old_range: this.anchor_before(old_range.start)
2225 ..this.anchor_after(old_range.end),
2226 new_text,
2227 label: language
2228 .as_ref()
2229 .and_then(|l| l.label_for_completion(&lsp_completion))
2230 .unwrap_or_else(|| {
2231 CodeLabel::plain(
2232 lsp_completion.label.clone(),
2233 lsp_completion.filter_text.as_deref(),
2234 )
2235 }),
2236 lsp_completion,
2237 })
2238 } else {
2239 None
2240 }
2241 })
2242 .collect())
2243 })
2244 })
2245 } else if let Some(project_id) = self.remote_id() {
2246 let rpc = self.client.clone();
2247 let message = proto::GetCompletions {
2248 project_id,
2249 buffer_id,
2250 position: Some(language::proto::serialize_anchor(&anchor)),
2251 version: serialize_version(&source_buffer.version()),
2252 };
2253 cx.spawn_weak(|_, mut cx| async move {
2254 let response = rpc.request(message).await?;
2255
2256 source_buffer_handle
2257 .update(&mut cx, |buffer, _| {
2258 buffer.wait_for_version(deserialize_version(response.version))
2259 })
2260 .await;
2261
2262 response
2263 .completions
2264 .into_iter()
2265 .map(|completion| {
2266 language::proto::deserialize_completion(completion, language.as_ref())
2267 })
2268 .collect()
2269 })
2270 } else {
2271 Task::ready(Ok(Default::default()))
2272 }
2273 }
2274
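    /// Resolve the given completion and apply any additional text edits it carries
    /// (for example, automatically inserted imports).
    ///
    /// The edits are applied in a single transaction, which is returned and retained
    /// in the buffer's undo history only when `push_to_history` is true.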
2275 pub fn apply_additional_edits_for_completion(
2276 &self,
2277 buffer_handle: ModelHandle<Buffer>,
2278 completion: Completion,
2279 push_to_history: bool,
2280 cx: &mut ModelContext<Self>,
2281 ) -> Task<Result<Option<Transaction>>> {
2282 let buffer = buffer_handle.read(cx);
2283 let buffer_id = buffer.remote_id();
2284
2285 if self.is_local() {
2286 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2287 server.clone()
2288 } else {
2289 return Task::ready(Ok(Default::default()));
2290 };
2291
2292 cx.spawn(|this, mut cx| async move {
2293 let resolved_completion = lang_server
2294 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2295 .await?;
2296 if let Some(edits) = resolved_completion.additional_text_edits {
2297 let edits = this
2298 .update(&mut cx, |this, cx| {
2299 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2300 })
2301 .await?;
2302 buffer_handle.update(&mut cx, |buffer, cx| {
2303 buffer.finalize_last_transaction();
2304 buffer.start_transaction();
2305 for (range, text) in edits {
2306 buffer.edit([range], text, cx);
2307 }
2308 let transaction = if buffer.end_transaction(cx).is_some() {
2309 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2310 if !push_to_history {
2311 buffer.forget_transaction(transaction.id);
2312 }
2313 Some(transaction)
2314 } else {
2315 None
2316 };
2317 Ok(transaction)
2318 })
2319 } else {
2320 Ok(None)
2321 }
2322 })
2323 } else if let Some(project_id) = self.remote_id() {
2324 let client = self.client.clone();
2325 cx.spawn(|_, mut cx| async move {
2326 let response = client
2327 .request(proto::ApplyCompletionAdditionalEdits {
2328 project_id,
2329 buffer_id,
2330 completion: Some(language::proto::serialize_completion(&completion)),
2331 })
2332 .await?;
2333
2334 if let Some(transaction) = response.transaction {
2335 let transaction = language::proto::deserialize_transaction(transaction)?;
2336 buffer_handle
2337 .update(&mut cx, |buffer, _| {
2338 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2339 })
2340 .await;
2341 if push_to_history {
2342 buffer_handle.update(&mut cx, |buffer, _| {
2343 buffer.push_transaction(transaction.clone(), Instant::now());
2344 });
2345 }
2346 Ok(Some(transaction))
2347 } else {
2348 Ok(None)
2349 }
2350 })
2351 } else {
2352 Task::ready(Err(anyhow!("project does not have a remote id")))
2353 }
2354 }
2355
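    /// Request code actions for the given range.
    ///
    /// Locally this sends `textDocument/codeAction` (restricted to quickfix and
    /// refactor kinds) and keeps only responses that are full code actions rather than
    /// bare commands. Remotely the request is forwarded to the host.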
2356 pub fn code_actions<T: ToOffset>(
2357 &self,
2358 buffer_handle: &ModelHandle<Buffer>,
2359 range: Range<T>,
2360 cx: &mut ModelContext<Self>,
2361 ) -> Task<Result<Vec<CodeAction>>> {
2362 let buffer_handle = buffer_handle.clone();
2363 let buffer = buffer_handle.read(cx);
2364 let buffer_id = buffer.remote_id();
2365 let worktree;
2366 let buffer_abs_path;
2367 if let Some(file) = File::from_dyn(buffer.file()) {
2368 worktree = file.worktree.clone();
2369 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2370 } else {
2371 return Task::ready(Ok(Default::default()));
2372 };
2373 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2374
2375 if worktree.read(cx).as_local().is_some() {
2376 let buffer_abs_path = buffer_abs_path.unwrap();
2377 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2378 server.clone()
2379 } else {
2380 return Task::ready(Ok(Default::default()));
2381 };
2382
2383 let lsp_range = lsp::Range::new(
2384 range.start.to_point_utf16(buffer).to_lsp_position(),
2385 range.end.to_point_utf16(buffer).to_lsp_position(),
2386 );
2387 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2389 return Ok(Default::default());
2390 }
2391
2392 Ok(lang_server
2393 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2394 text_document: lsp::TextDocumentIdentifier::new(
2395 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2396 ),
2397 range: lsp_range,
2398 work_done_progress_params: Default::default(),
2399 partial_result_params: Default::default(),
2400 context: lsp::CodeActionContext {
2401 diagnostics: Default::default(),
2402 only: Some(vec![
2403 lsp::CodeActionKind::QUICKFIX,
2404 lsp::CodeActionKind::REFACTOR,
2405 lsp::CodeActionKind::REFACTOR_EXTRACT,
2406 ]),
2407 },
2408 })
2409 .await?
2410 .unwrap_or_default()
2411 .into_iter()
2412 .filter_map(|entry| {
2413 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2414 Some(CodeAction {
2415 range: range.clone(),
2416 lsp_action,
2417 })
2418 } else {
2419 None
2420 }
2421 })
2422 .collect())
2423 })
2424 } else if let Some(project_id) = self.remote_id() {
2425 let rpc = self.client.clone();
2426 let version = buffer.version();
2427 cx.spawn_weak(|_, mut cx| async move {
2428 let response = rpc
2429 .request(proto::GetCodeActions {
2430 project_id,
2431 buffer_id,
2432 start: Some(language::proto::serialize_anchor(&range.start)),
2433 end: Some(language::proto::serialize_anchor(&range.end)),
2434 version: serialize_version(&version),
2435 })
2436 .await?;
2437
2438 buffer_handle
2439 .update(&mut cx, |buffer, _| {
2440 buffer.wait_for_version(deserialize_version(response.version))
2441 })
2442 .await;
2443
2444 response
2445 .actions
2446 .into_iter()
2447 .map(language::proto::deserialize_code_action)
2448 .collect()
2449 })
2450 } else {
2451 Task::ready(Ok(Default::default()))
2452 }
2453 }
2454
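    /// Apply a previously requested code action.
    ///
    /// If the action carries resolve data, its range is refreshed and the action is
    /// resolved via `codeAction/resolve`; otherwise the actions for the range are
    /// re-requested and matched by title. The action's workspace edit is then applied
    /// across all affected buffers, yielding a [`ProjectTransaction`].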
2455 pub fn apply_code_action(
2456 &self,
2457 buffer_handle: ModelHandle<Buffer>,
2458 mut action: CodeAction,
2459 push_to_history: bool,
2460 cx: &mut ModelContext<Self>,
2461 ) -> Task<Result<ProjectTransaction>> {
2462 if self.is_local() {
2463 let buffer = buffer_handle.read(cx);
2464 let lang_name = if let Some(lang) = buffer.language() {
2465 lang.name()
2466 } else {
2467 return Task::ready(Ok(Default::default()));
2468 };
2469 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2470 server.clone()
2471 } else {
2472 return Task::ready(Ok(Default::default()));
2473 };
2474 let range = action.range.to_point_utf16(buffer);
2475
2476 cx.spawn(|this, mut cx| async move {
2477 if let Some(lsp_range) = action
2478 .lsp_action
2479 .data
2480 .as_mut()
2481 .and_then(|d| d.get_mut("codeActionParams"))
2482 .and_then(|d| d.get_mut("range"))
2483 {
2484 *lsp_range = serde_json::to_value(&lsp::Range::new(
2485 range.start.to_lsp_position(),
2486 range.end.to_lsp_position(),
2487 ))
2488 .unwrap();
2489 action.lsp_action = lang_server
2490 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2491 .await?;
2492 } else {
2493 let actions = this
2494 .update(&mut cx, |this, cx| {
2495 this.code_actions(&buffer_handle, action.range, cx)
2496 })
2497 .await?;
2498 action.lsp_action = actions
2499 .into_iter()
2500 .find(|a| a.lsp_action.title == action.lsp_action.title)
2501 .ok_or_else(|| anyhow!("code action is outdated"))?
2502 .lsp_action;
2503 }
2504
2505 if let Some(edit) = action.lsp_action.edit {
2506 Self::deserialize_workspace_edit(
2507 this,
2508 edit,
2509 push_to_history,
2510 lang_name,
2511 lang_server,
2512 &mut cx,
2513 )
2514 .await
2515 } else {
2516 Ok(ProjectTransaction::default())
2517 }
2518 })
2519 } else if let Some(project_id) = self.remote_id() {
2520 let client = self.client.clone();
2521 let request = proto::ApplyCodeAction {
2522 project_id,
2523 buffer_id: buffer_handle.read(cx).remote_id(),
2524 action: Some(language::proto::serialize_code_action(&action)),
2525 };
2526 cx.spawn(|this, mut cx| async move {
2527 let response = client
2528 .request(request)
2529 .await?
2530 .transaction
2531 .ok_or_else(|| anyhow!("missing transaction"))?;
2532 this.update(&mut cx, |this, cx| {
2533 this.deserialize_project_transaction(response, push_to_history, cx)
2534 })
2535 .await
2536 })
2537 } else {
2538 Task::ready(Err(anyhow!("project does not have a remote id")))
2539 }
2540 }
2541
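    /// Apply an LSP workspace edit to the project.
    ///
    /// Resource operations (create, rename, delete) are performed directly on the
    /// filesystem, while text edits are applied to buffers opened through the given
    /// language server, producing one transaction per edited buffer.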
2542 async fn deserialize_workspace_edit(
2543 this: ModelHandle<Self>,
2544 edit: lsp::WorkspaceEdit,
2545 push_to_history: bool,
2546 language_name: Arc<str>,
2547 language_server: Arc<LanguageServer>,
2548 cx: &mut AsyncAppContext,
2549 ) -> Result<ProjectTransaction> {
2550 let fs = this.read_with(cx, |this, _| this.fs.clone());
2551 let mut operations = Vec::new();
2552 if let Some(document_changes) = edit.document_changes {
2553 match document_changes {
2554 lsp::DocumentChanges::Edits(edits) => {
2555 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2556 }
2557 lsp::DocumentChanges::Operations(ops) => operations = ops,
2558 }
2559 } else if let Some(changes) = edit.changes {
2560 operations.extend(changes.into_iter().map(|(uri, edits)| {
2561 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2562 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2563 uri,
2564 version: None,
2565 },
2566 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2567 })
2568 }));
2569 }
2570
2571 let mut project_transaction = ProjectTransaction::default();
2572 for operation in operations {
2573 match operation {
2574 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2575 let abs_path = op
2576 .uri
2577 .to_file_path()
2578 .map_err(|_| anyhow!("can't convert URI to path"))?;
2579
2580 if let Some(parent_path) = abs_path.parent() {
2581 fs.create_dir(parent_path).await?;
2582 }
                    // Note: `Path::ends_with` compares whole path components, so check
                    // the string form for a trailing slash to detect a directory URI.
                    if abs_path.to_string_lossy().ends_with('/') {
2584 fs.create_dir(&abs_path).await?;
2585 } else {
2586 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2587 .await?;
2588 }
2589 }
2590 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2591 let source_abs_path = op
2592 .old_uri
2593 .to_file_path()
2594 .map_err(|_| anyhow!("can't convert URI to path"))?;
2595 let target_abs_path = op
2596 .new_uri
2597 .to_file_path()
2598 .map_err(|_| anyhow!("can't convert URI to path"))?;
2599 fs.rename(
2600 &source_abs_path,
2601 &target_abs_path,
2602 op.options.map(Into::into).unwrap_or_default(),
2603 )
2604 .await?;
2605 }
2606 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2607 let abs_path = op
2608 .uri
2609 .to_file_path()
2610 .map_err(|_| anyhow!("can't convert URI to path"))?;
2611 let options = op.options.map(Into::into).unwrap_or_default();
                    if abs_path.to_string_lossy().ends_with('/') {
2613 fs.remove_dir(&abs_path, options).await?;
2614 } else {
2615 fs.remove_file(&abs_path, options).await?;
2616 }
2617 }
2618 lsp::DocumentChangeOperation::Edit(op) => {
2619 let buffer_to_edit = this
2620 .update(cx, |this, cx| {
2621 this.open_local_buffer_via_lsp(
2622 op.text_document.uri,
2623 language_name.clone(),
2624 language_server.clone(),
2625 cx,
2626 )
2627 })
2628 .await?;
2629
2630 let edits = this
2631 .update(cx, |this, cx| {
2632 let edits = op.edits.into_iter().map(|edit| match edit {
2633 lsp::OneOf::Left(edit) => edit,
2634 lsp::OneOf::Right(edit) => edit.text_edit,
2635 });
2636 this.edits_from_lsp(
2637 &buffer_to_edit,
2638 edits,
2639 op.text_document.version,
2640 cx,
2641 )
2642 })
2643 .await?;
2644
2645 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2646 buffer.finalize_last_transaction();
2647 buffer.start_transaction();
2648 for (range, text) in edits {
2649 buffer.edit([range], text, cx);
2650 }
2651 let transaction = if buffer.end_transaction(cx).is_some() {
2652 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2653 if !push_to_history {
2654 buffer.forget_transaction(transaction.id);
2655 }
2656 Some(transaction)
2657 } else {
2658 None
2659 };
2660
2661 transaction
2662 });
2663 if let Some(transaction) = transaction {
2664 project_transaction.0.insert(buffer_to_edit, transaction);
2665 }
2666 }
2667 }
2668 }
2669
2670 Ok(project_transaction)
2671 }
2672
2673 pub fn prepare_rename<T: ToPointUtf16>(
2674 &self,
2675 buffer: ModelHandle<Buffer>,
2676 position: T,
2677 cx: &mut ModelContext<Self>,
2678 ) -> Task<Result<Option<Range<Anchor>>>> {
2679 let position = position.to_point_utf16(buffer.read(cx));
2680 self.request_lsp(buffer, PrepareRename { position }, cx)
2681 }
2682
2683 pub fn perform_rename<T: ToPointUtf16>(
2684 &self,
2685 buffer: ModelHandle<Buffer>,
2686 position: T,
2687 new_name: String,
2688 push_to_history: bool,
2689 cx: &mut ModelContext<Self>,
2690 ) -> Task<Result<ProjectTransaction>> {
2691 let position = position.to_point_utf16(buffer.read(cx));
2692 self.request_lsp(
2693 buffer,
2694 PerformRename {
2695 position,
2696 new_name,
2697 push_to_history,
2698 },
2699 cx,
2700 )
2701 }
2702
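    /// Search the project for matches of the given query.
    ///
    /// For local projects, background workers first scan the visible worktrees for
    /// files whose contents may match; the matching files, plus all already-open
    /// buffers, are then searched as buffer snapshots, producing anchored ranges per
    /// buffer. For remote projects the search runs on the host.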
2703 pub fn search(
2704 &self,
2705 query: SearchQuery,
2706 cx: &mut ModelContext<Self>,
2707 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2708 if self.is_local() {
2709 let snapshots = self
2710 .visible_worktrees(cx)
2711 .filter_map(|tree| {
2712 let tree = tree.read(cx).as_local()?;
2713 Some(tree.snapshot())
2714 })
2715 .collect::<Vec<_>>();
2716
2717 let background = cx.background().clone();
2718 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2719 if path_count == 0 {
2720 return Task::ready(Ok(Default::default()));
2721 }
2722 let workers = background.num_cpus().min(path_count);
2723 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2724 cx.background()
2725 .spawn({
2726 let fs = self.fs.clone();
2727 let background = cx.background().clone();
2728 let query = query.clone();
2729 async move {
2730 let fs = &fs;
2731 let query = &query;
2732 let matching_paths_tx = &matching_paths_tx;
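                        // Partition the candidate paths evenly across the workers
                        // (ceiling division); each worker scans a contiguous slice of
                        // the concatenated worktree file lists.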
2733 let paths_per_worker = (path_count + workers - 1) / workers;
2734 let snapshots = &snapshots;
2735 background
2736 .scoped(|scope| {
2737 for worker_ix in 0..workers {
2738 let worker_start_ix = worker_ix * paths_per_worker;
2739 let worker_end_ix = worker_start_ix + paths_per_worker;
2740 scope.spawn(async move {
2741 let mut snapshot_start_ix = 0;
2742 let mut abs_path = PathBuf::new();
2743 for snapshot in snapshots {
2744 let snapshot_end_ix =
2745 snapshot_start_ix + snapshot.visible_file_count();
2746 if worker_end_ix <= snapshot_start_ix {
2747 break;
2748 } else if worker_start_ix > snapshot_end_ix {
2749 snapshot_start_ix = snapshot_end_ix;
2750 continue;
2751 } else {
2752 let start_in_snapshot = worker_start_ix
2753 .saturating_sub(snapshot_start_ix);
2754 let end_in_snapshot =
2755 cmp::min(worker_end_ix, snapshot_end_ix)
2756 - snapshot_start_ix;
2757
2758 for entry in snapshot
2759 .files(false, start_in_snapshot)
2760 .take(end_in_snapshot - start_in_snapshot)
2761 {
2762 if matching_paths_tx.is_closed() {
2763 break;
2764 }
2765
2766 abs_path.clear();
2767 abs_path.push(&snapshot.abs_path());
2768 abs_path.push(&entry.path);
2769 let matches = if let Some(file) =
2770 fs.open_sync(&abs_path).await.log_err()
2771 {
2772 query.detect(file).unwrap_or(false)
2773 } else {
2774 false
2775 };
2776
2777 if matches {
2778 let project_path =
2779 (snapshot.id(), entry.path.clone());
2780 if matching_paths_tx
2781 .send(project_path)
2782 .await
2783 .is_err()
2784 {
2785 break;
2786 }
2787 }
2788 }
2789
2790 snapshot_start_ix = snapshot_end_ix;
2791 }
2792 }
2793 });
2794 }
2795 })
2796 .await;
2797 }
2798 })
2799 .detach();
2800
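            // Feed every already-open buffer, plus a buffer for each newly matching
            // path, to the background workers that run the actual text search.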
2801 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2802 let open_buffers = self
2803 .opened_buffers
2804 .values()
2805 .filter_map(|b| b.upgrade(cx))
2806 .collect::<HashSet<_>>();
2807 cx.spawn(|this, cx| async move {
2808 for buffer in &open_buffers {
2809 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2810 buffers_tx.send((buffer.clone(), snapshot)).await?;
2811 }
2812
2813 let open_buffers = Rc::new(RefCell::new(open_buffers));
2814 while let Some(project_path) = matching_paths_rx.next().await {
2815 if buffers_tx.is_closed() {
2816 break;
2817 }
2818
2819 let this = this.clone();
2820 let open_buffers = open_buffers.clone();
2821 let buffers_tx = buffers_tx.clone();
2822 cx.spawn(|mut cx| async move {
2823 if let Some(buffer) = this
2824 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2825 .await
2826 .log_err()
2827 {
2828 if open_buffers.borrow_mut().insert(buffer.clone()) {
2829 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2830 buffers_tx.send((buffer, snapshot)).await?;
2831 }
2832 }
2833
2834 Ok::<_, anyhow::Error>(())
2835 })
2836 .detach();
2837 }
2838
2839 Ok::<_, anyhow::Error>(())
2840 })
2841 .detach_and_log_err(cx);
2842
2843 let background = cx.background().clone();
2844 cx.background().spawn(async move {
2845 let query = &query;
2846 let mut matched_buffers = Vec::new();
2847 for _ in 0..workers {
2848 matched_buffers.push(HashMap::default());
2849 }
2850 background
2851 .scoped(|scope| {
2852 for worker_matched_buffers in matched_buffers.iter_mut() {
2853 let mut buffers_rx = buffers_rx.clone();
2854 scope.spawn(async move {
2855 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2856 let buffer_matches = query
2857 .search(snapshot.as_rope())
2858 .await
2859 .iter()
2860 .map(|range| {
2861 snapshot.anchor_before(range.start)
2862 ..snapshot.anchor_after(range.end)
2863 })
2864 .collect::<Vec<_>>();
2865 if !buffer_matches.is_empty() {
2866 worker_matched_buffers
2867 .insert(buffer.clone(), buffer_matches);
2868 }
2869 }
2870 });
2871 }
2872 })
2873 .await;
2874 Ok(matched_buffers.into_iter().flatten().collect())
2875 })
2876 } else if let Some(project_id) = self.remote_id() {
2877 let request = self.client.request(query.to_proto(project_id));
2878 cx.spawn(|this, mut cx| async move {
2879 let response = request.await?;
2880 let mut result = HashMap::default();
2881 for location in response.locations {
2882 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2883 let target_buffer = this
2884 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2885 .await?;
2886 let start = location
2887 .start
2888 .and_then(deserialize_anchor)
2889 .ok_or_else(|| anyhow!("missing target start"))?;
2890 let end = location
2891 .end
2892 .and_then(deserialize_anchor)
2893 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2898 }
2899 Ok(result)
2900 })
2901 } else {
2902 Task::ready(Ok(Default::default()))
2903 }
2904 }
2905
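    /// Dispatch an [`LspCommand`] for the given buffer.
    ///
    /// Local buffers send the request to their language server (after checking its
    /// capabilities) and convert the LSP response; remote buffers forward the request
    /// to the host over RPC. In all other cases the default response is returned.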
2906 fn request_lsp<R: LspCommand>(
2907 &self,
2908 buffer_handle: ModelHandle<Buffer>,
2909 request: R,
2910 cx: &mut ModelContext<Self>,
2911 ) -> Task<Result<R::Response>>
2912 where
2913 <R::LspRequest as lsp::request::Request>::Result: Send,
2914 {
2915 let buffer = buffer_handle.read(cx);
2916 if self.is_local() {
2917 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2918 if let Some((file, language_server)) =
2919 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2920 {
2921 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2922 return cx.spawn(|this, cx| async move {
2923 if !request.check_capabilities(&language_server.capabilities()) {
2924 return Ok(Default::default());
2925 }
2926
2927 let response = language_server
2928 .request::<R::LspRequest>(lsp_params)
2929 .await
2930 .context("lsp request failed")?;
2931 request
2932 .response_from_lsp(response, this, buffer_handle, cx)
2933 .await
2934 });
2935 }
2936 } else if let Some(project_id) = self.remote_id() {
2937 let rpc = self.client.clone();
2938 let message = request.to_proto(project_id, buffer);
2939 return cx.spawn(|this, cx| async move {
2940 let response = rpc.request(message).await?;
2941 request
2942 .response_from_proto(response, this, buffer_handle, cx)
2943 .await
2944 });
2945 }
2946 Task::ready(Ok(Default::default()))
2947 }
2948
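    /// Return the worktree containing `abs_path` along with the path relative to that
    /// worktree's root, creating a new local worktree if no existing one contains it.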
2949 pub fn find_or_create_local_worktree(
2950 &mut self,
2951 abs_path: impl AsRef<Path>,
2952 visible: bool,
2953 cx: &mut ModelContext<Self>,
2954 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2955 let abs_path = abs_path.as_ref();
2956 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2957 Task::ready(Ok((tree.clone(), relative_path.into())))
2958 } else {
2959 let worktree = self.create_local_worktree(abs_path, visible, cx);
2960 cx.foreground()
2961 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2962 }
2963 }
2964
2965 pub fn find_local_worktree(
2966 &self,
2967 abs_path: &Path,
2968 cx: &AppContext,
2969 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2970 for tree in self.worktrees(cx) {
2971 if let Some(relative_path) = tree
2972 .read(cx)
2973 .as_local()
2974 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2975 {
2976 return Some((tree.clone(), relative_path.into()));
2977 }
2978 }
2979 None
2980 }
2981
2982 pub fn is_shared(&self) -> bool {
2983 match &self.client_state {
2984 ProjectClientState::Local { is_shared, .. } => *is_shared,
2985 ProjectClientState::Remote { .. } => false,
2986 }
2987 }
2988
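    /// Create a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path share a single loading task via
    /// `loading_local_worktrees`. Once loaded, the worktree is added to the project
    /// and, if the project has a remote id, registered or shared with the server.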
2989 fn create_local_worktree(
2990 &mut self,
2991 abs_path: impl AsRef<Path>,
2992 visible: bool,
2993 cx: &mut ModelContext<Self>,
2994 ) -> Task<Result<ModelHandle<Worktree>>> {
2995 let fs = self.fs.clone();
2996 let client = self.client.clone();
2997 let next_entry_id = self.next_entry_id.clone();
2998 let path: Arc<Path> = abs_path.as_ref().into();
2999 let task = self
3000 .loading_local_worktrees
3001 .entry(path.clone())
3002 .or_insert_with(|| {
3003 cx.spawn(|project, mut cx| {
3004 async move {
3005 let worktree = Worktree::local(
3006 client.clone(),
3007 path.clone(),
3008 visible,
3009 fs,
3010 next_entry_id,
3011 &mut cx,
3012 )
3013 .await;
3014 project.update(&mut cx, |project, _| {
3015 project.loading_local_worktrees.remove(&path);
3016 });
3017 let worktree = worktree?;
3018
3019 let (remote_project_id, is_shared) =
3020 project.update(&mut cx, |project, cx| {
3021 project.add_worktree(&worktree, cx);
3022 (project.remote_id(), project.is_shared())
3023 });
3024
3025 if let Some(project_id) = remote_project_id {
3026 if is_shared {
3027 worktree
3028 .update(&mut cx, |worktree, cx| {
3029 worktree.as_local_mut().unwrap().share(project_id, cx)
3030 })
3031 .await?;
3032 } else {
3033 worktree
3034 .update(&mut cx, |worktree, cx| {
3035 worktree.as_local_mut().unwrap().register(project_id, cx)
3036 })
3037 .await?;
3038 }
3039 }
3040
3041 Ok(worktree)
3042 }
                    .map_err(Arc::new)
3044 })
3045 .shared()
3046 })
3047 .clone();
3048 cx.foreground().spawn(async move {
3049 match task.await {
3050 Ok(worktree) => Ok(worktree),
3051 Err(err) => Err(anyhow!("{}", err)),
3052 }
3053 })
3054 }
3055
3056 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3057 self.worktrees.retain(|worktree| {
3058 worktree
3059 .upgrade(cx)
3060 .map_or(false, |w| w.read(cx).id() != id)
3061 });
3062 cx.notify();
3063 }
3064
3065 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3066 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3067 if worktree.read(cx).is_local() {
3068 cx.subscribe(&worktree, |this, worktree, _, cx| {
3069 this.update_local_worktree_buffers(worktree, cx);
3070 })
3071 .detach();
3072 }
3073
3074 let push_strong_handle = {
3075 let worktree = worktree.read(cx);
3076 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3077 };
3078 if push_strong_handle {
3079 self.worktrees
3080 .push(WorktreeHandle::Strong(worktree.clone()));
3081 } else {
3082 cx.observe_release(&worktree, |this, _, cx| {
3083 this.worktrees
3084 .retain(|worktree| worktree.upgrade(cx).is_some());
3085 cx.notify();
3086 })
3087 .detach();
3088 self.worktrees
3089 .push(WorktreeHandle::Weak(worktree.downgrade()));
3090 }
3091 cx.notify();
3092 }
3093
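    /// Refresh the `File` associated with every open buffer in the given local
    /// worktree after its snapshot changes, following renames by entry id or path and
    /// falling back to a file with no entry when the path no longer exists in the
    /// snapshot. Collaborators are notified of each updated file, and buffers that
    /// have been dropped are forgotten.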
3094 fn update_local_worktree_buffers(
3095 &mut self,
3096 worktree_handle: ModelHandle<Worktree>,
3097 cx: &mut ModelContext<Self>,
3098 ) {
3099 let snapshot = worktree_handle.read(cx).snapshot();
3100 let mut buffers_to_delete = Vec::new();
3101 for (buffer_id, buffer) in &self.opened_buffers {
3102 if let Some(buffer) = buffer.upgrade(cx) {
3103 buffer.update(cx, |buffer, cx| {
3104 if let Some(old_file) = File::from_dyn(buffer.file()) {
3105 if old_file.worktree != worktree_handle {
3106 return;
3107 }
3108
3109 let new_file = if let Some(entry) = old_file
3110 .entry_id
3111 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3112 {
3113 File {
3114 is_local: true,
3115 entry_id: Some(entry.id),
3116 mtime: entry.mtime,
3117 path: entry.path.clone(),
3118 worktree: worktree_handle.clone(),
3119 }
3120 } else if let Some(entry) =
3121 snapshot.entry_for_path(old_file.path().as_ref())
3122 {
3123 File {
3124 is_local: true,
3125 entry_id: Some(entry.id),
3126 mtime: entry.mtime,
3127 path: entry.path.clone(),
3128 worktree: worktree_handle.clone(),
3129 }
3130 } else {
3131 File {
3132 is_local: true,
3133 entry_id: None,
3134 path: old_file.path().clone(),
3135 mtime: old_file.mtime(),
3136 worktree: worktree_handle.clone(),
3137 }
3138 };
3139
3140 if let Some(project_id) = self.remote_id() {
3141 self.client
3142 .send(proto::UpdateBufferFile {
3143 project_id,
3144 buffer_id: *buffer_id as u64,
3145 file: Some(new_file.to_proto()),
3146 })
3147 .log_err();
3148 }
3149 buffer.file_updated(Box::new(new_file), cx).detach();
3150 }
3151 });
3152 } else {
3153 buffers_to_delete.push(*buffer_id);
3154 }
3155 }
3156
3157 for buffer_id in buffers_to_delete {
3158 self.opened_buffers.remove(&buffer_id);
3159 }
3160 }
3161
3162 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3163 let new_active_entry = entry.and_then(|project_path| {
3164 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3165 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3166 Some(ProjectEntry {
3167 worktree_id: project_path.worktree_id,
3168 entry_id: entry.id,
3169 })
3170 });
3171 if new_active_entry != self.active_entry {
3172 self.active_entry = new_active_entry;
3173 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3174 }
3175 }
3176
3177 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3178 self.language_servers_with_diagnostics_running > 0
3179 }
3180
3181 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3182 let mut summary = DiagnosticSummary::default();
3183 for (_, path_summary) in self.diagnostic_summaries(cx) {
3184 summary.error_count += path_summary.error_count;
3185 summary.warning_count += path_summary.warning_count;
3186 summary.info_count += path_summary.info_count;
3187 summary.hint_count += path_summary.hint_count;
3188 }
3189 summary
3190 }
3191
3192 pub fn diagnostic_summaries<'a>(
3193 &'a self,
3194 cx: &'a AppContext,
3195 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3196 self.worktrees(cx).flat_map(move |worktree| {
3197 let worktree = worktree.read(cx);
3198 let worktree_id = worktree.id();
3199 worktree
3200 .diagnostic_summaries()
3201 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3202 })
3203 }
3204
3205 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3206 self.language_servers_with_diagnostics_running += 1;
3207 if self.language_servers_with_diagnostics_running == 1 {
3208 cx.emit(Event::DiskBasedDiagnosticsStarted);
3209 }
3210 }
3211
3212 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3213 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3214 self.language_servers_with_diagnostics_running -= 1;
3215 if self.language_servers_with_diagnostics_running == 0 {
3216 cx.emit(Event::DiskBasedDiagnosticsFinished);
3217 }
3218 }
3219
3220 pub fn active_entry(&self) -> Option<ProjectEntry> {
3221 self.active_entry
3222 }
3223
3224 // RPC message handlers
3225
3226 async fn handle_unshare_project(
3227 this: ModelHandle<Self>,
3228 _: TypedEnvelope<proto::UnshareProject>,
3229 _: Arc<Client>,
3230 mut cx: AsyncAppContext,
3231 ) -> Result<()> {
3232 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3233 Ok(())
3234 }
3235
3236 async fn handle_add_collaborator(
3237 this: ModelHandle<Self>,
3238 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3239 _: Arc<Client>,
3240 mut cx: AsyncAppContext,
3241 ) -> Result<()> {
3242 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3243 let collaborator = envelope
3244 .payload
3245 .collaborator
3246 .take()
3247 .ok_or_else(|| anyhow!("empty collaborator"))?;
3248
3249 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3250 this.update(&mut cx, |this, cx| {
3251 this.collaborators
3252 .insert(collaborator.peer_id, collaborator);
3253 cx.notify();
3254 });
3255
3256 Ok(())
3257 }
3258
3259 async fn handle_remove_collaborator(
3260 this: ModelHandle<Self>,
3261 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3262 _: Arc<Client>,
3263 mut cx: AsyncAppContext,
3264 ) -> Result<()> {
3265 this.update(&mut cx, |this, cx| {
3266 let peer_id = PeerId(envelope.payload.peer_id);
3267 let replica_id = this
3268 .collaborators
3269 .remove(&peer_id)
3270 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3271 .replica_id;
3272 for (_, buffer) in &this.opened_buffers {
3273 if let Some(buffer) = buffer.upgrade(cx) {
3274 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3275 }
3276 }
3277 cx.notify();
3278 Ok(())
3279 })
3280 }
3281
3282 async fn handle_register_worktree(
3283 this: ModelHandle<Self>,
3284 envelope: TypedEnvelope<proto::RegisterWorktree>,
3285 client: Arc<Client>,
3286 mut cx: AsyncAppContext,
3287 ) -> Result<()> {
3288 this.update(&mut cx, |this, cx| {
3289 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3290 let replica_id = this.replica_id();
3291 let worktree = proto::Worktree {
3292 id: envelope.payload.worktree_id,
3293 root_name: envelope.payload.root_name,
3294 entries: Default::default(),
3295 diagnostic_summaries: Default::default(),
3296 visible: envelope.payload.visible,
3297 };
3298 let (worktree, load_task) =
3299 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3300 this.add_worktree(&worktree, cx);
3301 load_task.detach();
3302 Ok(())
3303 })
3304 }
3305
3306 async fn handle_unregister_worktree(
3307 this: ModelHandle<Self>,
3308 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3309 _: Arc<Client>,
3310 mut cx: AsyncAppContext,
3311 ) -> Result<()> {
3312 this.update(&mut cx, |this, cx| {
3313 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3314 this.remove_worktree(worktree_id, cx);
3315 Ok(())
3316 })
3317 }
3318
3319 async fn handle_update_worktree(
3320 this: ModelHandle<Self>,
3321 envelope: TypedEnvelope<proto::UpdateWorktree>,
3322 _: Arc<Client>,
3323 mut cx: AsyncAppContext,
3324 ) -> Result<()> {
3325 this.update(&mut cx, |this, cx| {
3326 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3327 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3328 worktree.update(cx, |worktree, _| {
3329 let worktree = worktree.as_remote_mut().unwrap();
3330 worktree.update_from_remote(envelope)
3331 })?;
3332 }
3333 Ok(())
3334 })
3335 }
3336
3337 async fn handle_update_diagnostic_summary(
3338 this: ModelHandle<Self>,
3339 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3340 _: Arc<Client>,
3341 mut cx: AsyncAppContext,
3342 ) -> Result<()> {
3343 this.update(&mut cx, |this, cx| {
3344 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3345 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3346 if let Some(summary) = envelope.payload.summary {
3347 let project_path = ProjectPath {
3348 worktree_id,
3349 path: Path::new(&summary.path).into(),
3350 };
3351 worktree.update(cx, |worktree, _| {
3352 worktree
3353 .as_remote_mut()
3354 .unwrap()
3355 .update_diagnostic_summary(project_path.path.clone(), &summary);
3356 });
3357 cx.emit(Event::DiagnosticsUpdated(project_path));
3358 }
3359 }
3360 Ok(())
3361 })
3362 }
3363
3364 async fn handle_start_language_server(
3365 this: ModelHandle<Self>,
3366 envelope: TypedEnvelope<proto::StartLanguageServer>,
3367 _: Arc<Client>,
3368 mut cx: AsyncAppContext,
3369 ) -> Result<()> {
3370 let server = envelope
3371 .payload
3372 .server
3373 .ok_or_else(|| anyhow!("invalid server"))?;
3374 this.update(&mut cx, |this, cx| {
3375 this.language_server_statuses.insert(
3376 server.id as usize,
3377 LanguageServerStatus {
3378 name: server.name,
3379 pending_work: Default::default(),
3380 pending_diagnostic_updates: 0,
3381 },
3382 );
3383 cx.notify();
3384 });
3385 Ok(())
3386 }
3387
3388 async fn handle_update_language_server(
3389 this: ModelHandle<Self>,
3390 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3391 _: Arc<Client>,
3392 mut cx: AsyncAppContext,
3393 ) -> Result<()> {
3394 let language_server_id = envelope.payload.language_server_id as usize;
3395 match envelope
3396 .payload
3397 .variant
3398 .ok_or_else(|| anyhow!("invalid variant"))?
3399 {
3400 proto::update_language_server::Variant::WorkStart(payload) => {
3401 this.update(&mut cx, |this, cx| {
3402 this.on_lsp_work_start(language_server_id, payload.token, cx);
3403 })
3404 }
3405 proto::update_language_server::Variant::WorkProgress(payload) => {
3406 this.update(&mut cx, |this, cx| {
3407 this.on_lsp_work_progress(
3408 language_server_id,
3409 payload.token,
3410 LanguageServerProgress {
3411 message: payload.message,
3412 percentage: payload.percentage.map(|p| p as usize),
3413 last_update_at: Instant::now(),
3414 },
3415 cx,
3416 );
3417 })
3418 }
3419 proto::update_language_server::Variant::WorkEnd(payload) => {
3420 this.update(&mut cx, |this, cx| {
3421 this.on_lsp_work_end(language_server_id, payload.token, cx);
3422 })
3423 }
3424 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3425 this.update(&mut cx, |this, cx| {
3426 this.disk_based_diagnostics_started(cx);
3427 })
3428 }
3429 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3430 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3431 }
3432 }
3433
3434 Ok(())
3435 }
3436
3437 async fn handle_update_buffer(
3438 this: ModelHandle<Self>,
3439 envelope: TypedEnvelope<proto::UpdateBuffer>,
3440 _: Arc<Client>,
3441 mut cx: AsyncAppContext,
3442 ) -> Result<()> {
3443 this.update(&mut cx, |this, cx| {
3444 let payload = envelope.payload.clone();
3445 let buffer_id = payload.buffer_id;
3446 let ops = payload
3447 .operations
3448 .into_iter()
3449 .map(|op| language::proto::deserialize_operation(op))
3450 .collect::<Result<Vec<_>, _>>()?;
3451 match this.opened_buffers.entry(buffer_id) {
3452 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3453 OpenBuffer::Strong(buffer) => {
3454 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3455 }
3456 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3457 OpenBuffer::Weak(_) => {}
3458 },
3459 hash_map::Entry::Vacant(e) => {
3460 e.insert(OpenBuffer::Loading(ops));
3461 }
3462 }
3463 Ok(())
3464 })
3465 }
3466
3467 async fn handle_update_buffer_file(
3468 this: ModelHandle<Self>,
3469 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3470 _: Arc<Client>,
3471 mut cx: AsyncAppContext,
3472 ) -> Result<()> {
3473 this.update(&mut cx, |this, cx| {
3474 let payload = envelope.payload.clone();
3475 let buffer_id = payload.buffer_id;
3476 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3477 let worktree = this
3478 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3479 .ok_or_else(|| anyhow!("no such worktree"))?;
3480 let file = File::from_proto(file, worktree.clone(), cx)?;
3481 let buffer = this
3482 .opened_buffers
3483 .get_mut(&buffer_id)
3484 .and_then(|b| b.upgrade(cx))
3485 .ok_or_else(|| anyhow!("no such buffer"))?;
3486 buffer.update(cx, |buffer, cx| {
3487 buffer.file_updated(Box::new(file), cx).detach();
3488 });
3489 Ok(())
3490 })
3491 }
3492
3493 async fn handle_save_buffer(
3494 this: ModelHandle<Self>,
3495 envelope: TypedEnvelope<proto::SaveBuffer>,
3496 _: Arc<Client>,
3497 mut cx: AsyncAppContext,
3498 ) -> Result<proto::BufferSaved> {
3499 let buffer_id = envelope.payload.buffer_id;
3500 let requested_version = deserialize_version(envelope.payload.version);
3501
3502 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3503 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3504 let buffer = this
3505 .opened_buffers
3506 .get(&buffer_id)
3507 .map(|buffer| buffer.upgrade(cx).unwrap())
3508 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3509 Ok::<_, anyhow::Error>((project_id, buffer))
3510 })?;
3511 buffer
3512 .update(&mut cx, |buffer, _| {
3513 buffer.wait_for_version(requested_version)
3514 })
3515 .await;
3516
3517 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3518 Ok(proto::BufferSaved {
3519 project_id,
3520 buffer_id,
3521 version: serialize_version(&saved_version),
3522 mtime: Some(mtime.into()),
3523 })
3524 }
3525
3526 async fn handle_format_buffers(
3527 this: ModelHandle<Self>,
3528 envelope: TypedEnvelope<proto::FormatBuffers>,
3529 _: Arc<Client>,
3530 mut cx: AsyncAppContext,
3531 ) -> Result<proto::FormatBuffersResponse> {
3532 let sender_id = envelope.original_sender_id()?;
3533 let format = this.update(&mut cx, |this, cx| {
3534 let mut buffers = HashSet::default();
3535 for buffer_id in &envelope.payload.buffer_ids {
3536 buffers.insert(
3537 this.opened_buffers
3538 .get(buffer_id)
3539 .map(|buffer| buffer.upgrade(cx).unwrap())
3540 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3541 );
3542 }
3543 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3544 })?;
3545
3546 let project_transaction = format.await?;
3547 let project_transaction = this.update(&mut cx, |this, cx| {
3548 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3549 });
3550 Ok(proto::FormatBuffersResponse {
3551 transaction: Some(project_transaction),
3552 })
3553 }
3554
3555 async fn handle_get_completions(
3556 this: ModelHandle<Self>,
3557 envelope: TypedEnvelope<proto::GetCompletions>,
3558 _: Arc<Client>,
3559 mut cx: AsyncAppContext,
3560 ) -> Result<proto::GetCompletionsResponse> {
3561 let position = envelope
3562 .payload
3563 .position
3564 .and_then(language::proto::deserialize_anchor)
3565 .ok_or_else(|| anyhow!("invalid position"))?;
3566 let version = deserialize_version(envelope.payload.version);
3567 let buffer = this.read_with(&cx, |this, cx| {
3568 this.opened_buffers
3569 .get(&envelope.payload.buffer_id)
3570 .map(|buffer| buffer.upgrade(cx).unwrap())
3571 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3572 })?;
3573 buffer
3574 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3575 .await;
3576 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3577 let completions = this
3578 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3579 .await?;
3580
3581 Ok(proto::GetCompletionsResponse {
3582 completions: completions
3583 .iter()
3584 .map(language::proto::serialize_completion)
3585 .collect(),
3586 version: serialize_version(&version),
3587 })
3588 }
3589
3590 async fn handle_apply_additional_edits_for_completion(
3591 this: ModelHandle<Self>,
3592 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3593 _: Arc<Client>,
3594 mut cx: AsyncAppContext,
3595 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3596 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3597 let buffer = this
3598 .opened_buffers
3599 .get(&envelope.payload.buffer_id)
3600 .map(|buffer| buffer.upgrade(cx).unwrap())
3601 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3602 let language = buffer.read(cx).language();
3603 let completion = language::proto::deserialize_completion(
3604 envelope
3605 .payload
3606 .completion
3607 .ok_or_else(|| anyhow!("invalid completion"))?,
3608 language,
3609 )?;
3610 Ok::<_, anyhow::Error>(
3611 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3612 )
3613 })?;
3614
3615 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3616 transaction: apply_additional_edits
3617 .await?
3618 .as_ref()
3619 .map(language::proto::serialize_transaction),
3620 })
3621 }
3622
3623 async fn handle_get_code_actions(
3624 this: ModelHandle<Self>,
3625 envelope: TypedEnvelope<proto::GetCodeActions>,
3626 _: Arc<Client>,
3627 mut cx: AsyncAppContext,
3628 ) -> Result<proto::GetCodeActionsResponse> {
3629 let start = envelope
3630 .payload
3631 .start
3632 .and_then(language::proto::deserialize_anchor)
3633 .ok_or_else(|| anyhow!("invalid start"))?;
3634 let end = envelope
3635 .payload
3636 .end
3637 .and_then(language::proto::deserialize_anchor)
3638 .ok_or_else(|| anyhow!("invalid end"))?;
3639 let buffer = this.update(&mut cx, |this, cx| {
3640 this.opened_buffers
3641 .get(&envelope.payload.buffer_id)
3642 .map(|buffer| buffer.upgrade(cx).unwrap())
3643 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3644 })?;
3645 buffer
3646 .update(&mut cx, |buffer, _| {
3647 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3648 })
3649 .await;
3650
3651 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3652 let code_actions = this.update(&mut cx, |this, cx| {
3653 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3654 })?;
3655
3656 Ok(proto::GetCodeActionsResponse {
3657 actions: code_actions
3658 .await?
3659 .iter()
3660 .map(language::proto::serialize_code_action)
3661 .collect(),
3662 version: serialize_version(&version),
3663 })
3664 }
3665
3666 async fn handle_apply_code_action(
3667 this: ModelHandle<Self>,
3668 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3669 _: Arc<Client>,
3670 mut cx: AsyncAppContext,
3671 ) -> Result<proto::ApplyCodeActionResponse> {
3672 let sender_id = envelope.original_sender_id()?;
3673 let action = language::proto::deserialize_code_action(
3674 envelope
3675 .payload
3676 .action
3677 .ok_or_else(|| anyhow!("invalid action"))?,
3678 )?;
3679 let apply_code_action = this.update(&mut cx, |this, cx| {
3680 let buffer = this
3681 .opened_buffers
3682 .get(&envelope.payload.buffer_id)
3683 .map(|buffer| buffer.upgrade(cx).unwrap())
3684 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3685 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3686 })?;
3687
3688 let project_transaction = apply_code_action.await?;
3689 let project_transaction = this.update(&mut cx, |this, cx| {
3690 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3691 });
3692 Ok(proto::ApplyCodeActionResponse {
3693 transaction: Some(project_transaction),
3694 })
3695 }
3696
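    /// Generic handler for RPC requests that wrap an [`LspCommand`]: the target buffer
    /// is looked up, the typed request is reconstructed from the protobuf payload and
    /// dispatched through `request_lsp`, and the response is serialized back to the
    /// requesting peer along with the buffer version it was computed against.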
3697 async fn handle_lsp_command<T: LspCommand>(
3698 this: ModelHandle<Self>,
3699 envelope: TypedEnvelope<T::ProtoRequest>,
3700 _: Arc<Client>,
3701 mut cx: AsyncAppContext,
3702 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3703 where
3704 <T::LspRequest as lsp::request::Request>::Result: Send,
3705 {
3706 let sender_id = envelope.original_sender_id()?;
3707 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3708 let buffer_handle = this.read_with(&cx, |this, _| {
3709 this.opened_buffers
3710 .get(&buffer_id)
3711 .and_then(|buffer| buffer.upgrade(&cx))
3712 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3713 })?;
3714 let request = T::from_proto(
3715 envelope.payload,
3716 this.clone(),
3717 buffer_handle.clone(),
3718 cx.clone(),
3719 )
3720 .await?;
3721 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3722 let response = this
3723 .update(&mut cx, |this, cx| {
3724 this.request_lsp(buffer_handle, request, cx)
3725 })
3726 .await?;
3727 this.update(&mut cx, |this, cx| {
3728 Ok(T::response_to_proto(
3729 response,
3730 this,
3731 sender_id,
3732 &buffer_version,
3733 cx,
3734 ))
3735 })
3736 }
3737
3738 async fn handle_get_project_symbols(
3739 this: ModelHandle<Self>,
3740 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3741 _: Arc<Client>,
3742 mut cx: AsyncAppContext,
3743 ) -> Result<proto::GetProjectSymbolsResponse> {
3744 let symbols = this
3745 .update(&mut cx, |this, cx| {
3746 this.symbols(&envelope.payload.query, cx)
3747 })
3748 .await?;
3749
3750 Ok(proto::GetProjectSymbolsResponse {
3751 symbols: symbols.iter().map(serialize_symbol).collect(),
3752 })
3753 }
3754
3755 async fn handle_search_project(
3756 this: ModelHandle<Self>,
3757 envelope: TypedEnvelope<proto::SearchProject>,
3758 _: Arc<Client>,
3759 mut cx: AsyncAppContext,
3760 ) -> Result<proto::SearchProjectResponse> {
3761 let peer_id = envelope.original_sender_id()?;
3762 let query = SearchQuery::from_proto(envelope.payload)?;
3763 let result = this
3764 .update(&mut cx, |this, cx| this.search(query, cx))
3765 .await?;
3766
3767 this.update(&mut cx, |this, cx| {
3768 let mut locations = Vec::new();
3769 for (buffer, ranges) in result {
3770 for range in ranges {
3771 let start = serialize_anchor(&range.start);
3772 let end = serialize_anchor(&range.end);
3773 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3774 locations.push(proto::Location {
3775 buffer: Some(buffer),
3776 start: Some(start),
3777 end: Some(end),
3778 });
3779 }
3780 }
3781 Ok(proto::SearchProjectResponse { locations })
3782 })
3783 }
3784
3785 async fn handle_open_buffer_for_symbol(
3786 this: ModelHandle<Self>,
3787 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3788 _: Arc<Client>,
3789 mut cx: AsyncAppContext,
3790 ) -> Result<proto::OpenBufferForSymbolResponse> {
3791 let peer_id = envelope.original_sender_id()?;
3792 let symbol = envelope
3793 .payload
3794 .symbol
3795 .ok_or_else(|| anyhow!("invalid symbol"))?;
3796 let symbol = this.read_with(&cx, |this, _| {
3797 let symbol = this.deserialize_symbol(symbol)?;
3798 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3799 if signature == symbol.signature {
3800 Ok(symbol)
3801 } else {
3802 Err(anyhow!("invalid symbol signature"))
3803 }
3804 })?;
3805 let buffer = this
3806 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3807 .await?;
3808
3809 Ok(proto::OpenBufferForSymbolResponse {
3810 buffer: Some(this.update(&mut cx, |this, cx| {
3811 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3812 })),
3813 })
3814 }
3815
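    // Produce a SHA-256 digest of the worktree id, the path, and this project's private
    // nonce. The signature travels with symbols sent to peers and is verified in
    // `handle_open_buffer_for_symbol`, so peers can only open paths for symbols this
    // project actually handed out.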
3816 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3817 let mut hasher = Sha256::new();
3818 hasher.update(worktree_id.to_proto().to_be_bytes());
3819 hasher.update(path.to_string_lossy().as_bytes());
3820 hasher.update(self.nonce.to_be_bytes());
3821 hasher.finalize().as_slice().try_into().unwrap()
3822 }
3823
3824 async fn handle_open_buffer(
3825 this: ModelHandle<Self>,
3826 envelope: TypedEnvelope<proto::OpenBuffer>,
3827 _: Arc<Client>,
3828 mut cx: AsyncAppContext,
3829 ) -> Result<proto::OpenBufferResponse> {
3830 let peer_id = envelope.original_sender_id()?;
3831 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3832 let open_buffer = this.update(&mut cx, |this, cx| {
3833 this.open_buffer(
3834 ProjectPath {
3835 worktree_id,
3836 path: PathBuf::from(envelope.payload.path).into(),
3837 },
3838 cx,
3839 )
3840 });
3841
3842 let buffer = open_buffer.await?;
3843 this.update(&mut cx, |this, cx| {
3844 Ok(proto::OpenBufferResponse {
3845 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3846 })
3847 })
3848 }
3849
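    // Convert a `ProjectTransaction` into its protobuf form, serializing each affected
    // buffer for the given peer alongside its transaction.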
3850 fn serialize_project_transaction_for_peer(
3851 &mut self,
3852 project_transaction: ProjectTransaction,
3853 peer_id: PeerId,
3854 cx: &AppContext,
3855 ) -> proto::ProjectTransaction {
3856 let mut serialized_transaction = proto::ProjectTransaction {
3857 buffers: Default::default(),
3858 transactions: Default::default(),
3859 };
3860 for (buffer, transaction) in project_transaction.0 {
3861 serialized_transaction
3862 .buffers
3863 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3864 serialized_transaction
3865 .transactions
3866 .push(language::proto::serialize_transaction(&transaction));
3867 }
3868 serialized_transaction
3869 }
3870
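    // Reconstruct a `ProjectTransaction` received from a peer: resolve each serialized
    // buffer, deserialize its transaction, wait until the referenced edits have arrived,
    // and optionally push the transaction onto the buffer's undo history.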
3871 fn deserialize_project_transaction(
3872 &mut self,
3873 message: proto::ProjectTransaction,
3874 push_to_history: bool,
3875 cx: &mut ModelContext<Self>,
3876 ) -> Task<Result<ProjectTransaction>> {
3877 cx.spawn(|this, mut cx| async move {
3878 let mut project_transaction = ProjectTransaction::default();
3879 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3880 let buffer = this
3881 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3882 .await?;
3883 let transaction = language::proto::deserialize_transaction(transaction)?;
3884 project_transaction.0.insert(buffer, transaction);
3885 }
3886
3887 for (buffer, transaction) in &project_transaction.0 {
3888 buffer
3889 .update(&mut cx, |buffer, _| {
3890 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3891 })
3892 .await;
3893
3894 if push_to_history {
3895 buffer.update(&mut cx, |buffer, _| {
3896 buffer.push_transaction(transaction.clone(), Instant::now());
3897 });
3898 }
3899 }
3900
3901 Ok(project_transaction)
3902 })
3903 }
3904
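    // The first time a buffer is sent to a given peer, include its full state; on
    // subsequent sends, only its remote id, since the peer already has the contents.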
3905 fn serialize_buffer_for_peer(
3906 &mut self,
3907 buffer: &ModelHandle<Buffer>,
3908 peer_id: PeerId,
3909 cx: &AppContext,
3910 ) -> proto::Buffer {
3911 let buffer_id = buffer.read(cx).remote_id();
3912 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3913 if shared_buffers.insert(buffer_id) {
3914 proto::Buffer {
3915 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3916 }
3917 } else {
3918 proto::Buffer {
3919 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3920 }
3921 }
3922 }
3923
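    // Resolve a `proto::Buffer` received from a peer. An `Id` variant refers to a buffer
    // we should already have (or will receive shortly), so wait until it appears in
    // `opened_buffers`; a `State` variant carries the full buffer contents, which we
    // instantiate and register locally.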
3924 fn deserialize_buffer(
3925 &mut self,
3926 buffer: proto::Buffer,
3927 cx: &mut ModelContext<Self>,
3928 ) -> Task<Result<ModelHandle<Buffer>>> {
3929 let replica_id = self.replica_id();
3930
3931 let opened_buffer_tx = self.opened_buffer.0.clone();
3932 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3933 cx.spawn(|this, mut cx| async move {
3934 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3935 proto::buffer::Variant::Id(id) => {
3936 let buffer = loop {
3937 let buffer = this.read_with(&cx, |this, cx| {
3938 this.opened_buffers
3939 .get(&id)
3940 .and_then(|buffer| buffer.upgrade(cx))
3941 });
3942 if let Some(buffer) = buffer {
3943 break buffer;
3944 }
3945 opened_buffer_rx
3946 .next()
3947 .await
3948 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3949 };
3950 Ok(buffer)
3951 }
3952 proto::buffer::Variant::State(mut buffer) => {
3953 let mut buffer_worktree = None;
3954 let mut buffer_file = None;
3955 if let Some(file) = buffer.file.take() {
3956 this.read_with(&cx, |this, cx| {
3957 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3958 let worktree =
3959 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3960 anyhow!("no worktree found for id {}", file.worktree_id)
3961 })?;
3962 buffer_file =
3963 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3964 as Box<dyn language::File>);
3965 buffer_worktree = Some(worktree);
3966 Ok::<_, anyhow::Error>(())
3967 })?;
3968 }
3969
3970 let buffer = cx.add_model(|cx| {
3971 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3972 });
3973
3974 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3975
3976 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3977 Ok(buffer)
3978 }
3979 }
3980 })
3981 }
3982
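    // Convert a `proto::Symbol` back into a `Symbol`, recomputing its label with the
    // local language registry when the language is available.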
3983 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3984 let language = self
3985 .languages
3986 .get_language(&serialized_symbol.language_name);
3987 let start = serialized_symbol
3988 .start
3989 .ok_or_else(|| anyhow!("invalid start"))?;
3990 let end = serialized_symbol
3991 .end
3992 .ok_or_else(|| anyhow!("invalid end"))?;
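        // The wire format stores the symbol kind as a raw integer; transmute it back
        // into an LSP symbol kind, mirroring `serialize_symbol` below. This assumes the
        // peer sent a value originally produced by that function.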
3993 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3994 Ok(Symbol {
3995 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3996 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3997 language_name: serialized_symbol.language_name.clone(),
3998 label: language
3999 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4000 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4001 name: serialized_symbol.name,
4002 path: PathBuf::from(serialized_symbol.path),
4003 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4004 kind,
4005 signature: serialized_symbol
4006 .signature
4007 .try_into()
4008 .map_err(|_| anyhow!("invalid signature"))?,
4009 })
4010 }
4011
4012 async fn handle_buffer_saved(
4013 this: ModelHandle<Self>,
4014 envelope: TypedEnvelope<proto::BufferSaved>,
4015 _: Arc<Client>,
4016 mut cx: AsyncAppContext,
4017 ) -> Result<()> {
4018 let version = deserialize_version(envelope.payload.version);
4019 let mtime = envelope
4020 .payload
4021 .mtime
4022 .ok_or_else(|| anyhow!("missing mtime"))?
4023 .into();
4024
4025 this.update(&mut cx, |this, cx| {
4026 let buffer = this
4027 .opened_buffers
4028 .get(&envelope.payload.buffer_id)
4029 .and_then(|buffer| buffer.upgrade(cx));
4030 if let Some(buffer) = buffer {
4031 buffer.update(cx, |buffer, cx| {
4032 buffer.did_save(version, mtime, None, cx);
4033 });
4034 }
4035 Ok(())
4036 })
4037 }
4038
4039 async fn handle_buffer_reloaded(
4040 this: ModelHandle<Self>,
4041 envelope: TypedEnvelope<proto::BufferReloaded>,
4042 _: Arc<Client>,
4043 mut cx: AsyncAppContext,
4044 ) -> Result<()> {
4045 let payload = envelope.payload.clone();
4046 let version = deserialize_version(payload.version);
4047 let mtime = payload
4048 .mtime
4049 .ok_or_else(|| anyhow!("missing mtime"))?
4050 .into();
4051 this.update(&mut cx, |this, cx| {
4052 let buffer = this
4053 .opened_buffers
4054 .get(&payload.buffer_id)
4055 .and_then(|buffer| buffer.upgrade(cx));
4056 if let Some(buffer) = buffer {
4057 buffer.update(cx, |buffer, cx| {
4058 buffer.did_reload(version, mtime, cx);
4059 });
4060 }
4061 Ok(())
4062 })
4063 }
4064
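    // Fuzzy-match `query` against the paths of all visible worktrees, producing up to
    // `max_results` matches on the background executor. Worktree root names are included
    // in the match text only when more than one worktree is visible.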
4065 pub fn match_paths<'a>(
4066 &self,
4067 query: &'a str,
4068 include_ignored: bool,
4069 smart_case: bool,
4070 max_results: usize,
4071 cancel_flag: &'a AtomicBool,
4072 cx: &AppContext,
4073 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4074 let worktrees = self
4075 .worktrees(cx)
4076 .filter(|worktree| worktree.read(cx).is_visible())
4077 .collect::<Vec<_>>();
4078 let include_root_name = worktrees.len() > 1;
4079 let candidate_sets = worktrees
4080 .into_iter()
4081 .map(|worktree| CandidateSet {
4082 snapshot: worktree.read(cx).snapshot(),
4083 include_ignored,
4084 include_root_name,
4085 })
4086 .collect::<Vec<_>>();
4087
4088 let background = cx.background().clone();
4089 async move {
4090 fuzzy::match_paths(
4091 candidate_sets.as_slice(),
4092 query,
4093 smart_case,
4094 max_results,
4095 cancel_flag,
4096 background,
4097 )
4098 .await
4099 }
4100 }
4101
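    // Convert a batch of LSP text edits into anchor ranges against the current buffer.
    // The edits are interpreted against the snapshot corresponding to `version` (see
    // `buffer_snapshot_for_lsp_version`), adjacent edits are merged, and multiline
    // replacements are diffed so anchors in unchanged regions keep their positions.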
4102 fn edits_from_lsp(
4103 &mut self,
4104 buffer: &ModelHandle<Buffer>,
4105 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4106 version: Option<i32>,
4107 cx: &mut ModelContext<Self>,
4108 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4109 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4110 cx.background().spawn(async move {
4111 let snapshot = snapshot?;
4112 let mut lsp_edits = lsp_edits
4113 .into_iter()
4114 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4115 .peekable();
4116
4117 let mut edits = Vec::new();
4118 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4119 // Combine any LSP edits that are adjacent.
4120 //
4121 // Also, combine LSP edits that are separated from each other by only
4122 // a newline. This is important because for some code actions,
4123 // Rust-analyzer rewrites the entire buffer via a series of edits that
4124 // are separated by unchanged newline characters.
4125 //
4126 // In order for the diffing logic below to work properly, any edits that
4127 // cancel each other out must be combined into one.
4128 while let Some((next_range, next_text)) = lsp_edits.peek() {
4129 if next_range.start > range.end {
4130 if next_range.start.row > range.end.row + 1
4131 || next_range.start.column > 0
4132 || snapshot.clip_point_utf16(
4133 PointUtf16::new(range.end.row, u32::MAX),
4134 Bias::Left,
4135 ) > range.end
4136 {
4137 break;
4138 }
4139 new_text.push('\n');
4140 }
4141 range.end = next_range.end;
4142 new_text.push_str(&next_text);
4143 lsp_edits.next();
4144 }
4145
4146 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4147 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4148 {
4149 return Err(anyhow!("invalid edits received from language server"));
4150 }
4151
4152 // For multiline edits, perform a diff of the old and new text so that
4153 // we can identify the changes more precisely, preserving the locations
4154 // of any anchors positioned in the unchanged regions.
4155 if range.end.row > range.start.row {
4156 let mut offset = range.start.to_offset(&snapshot);
4157 let old_text = snapshot.text_for_range(range).collect::<String>();
4158
4159 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4160 let mut moved_since_edit = true;
4161 for change in diff.iter_all_changes() {
4162 let tag = change.tag();
4163 let value = change.value();
4164 match tag {
4165 ChangeTag::Equal => {
4166 offset += value.len();
4167 moved_since_edit = true;
4168 }
4169 ChangeTag::Delete => {
4170 let start = snapshot.anchor_after(offset);
4171 let end = snapshot.anchor_before(offset + value.len());
4172 if moved_since_edit {
4173 edits.push((start..end, String::new()));
4174 } else {
4175 edits.last_mut().unwrap().0.end = end;
4176 }
4177 offset += value.len();
4178 moved_since_edit = false;
4179 }
4180 ChangeTag::Insert => {
4181 if moved_since_edit {
4182 let anchor = snapshot.anchor_after(offset);
4183 edits.push((anchor.clone()..anchor, value.to_string()));
4184 } else {
4185 edits.last_mut().unwrap().1.push_str(value);
4186 }
4187 moved_since_edit = false;
4188 }
4189 }
4190 }
4191 } else if range.end == range.start {
4192 let anchor = snapshot.anchor_after(range.start);
4193 edits.push((anchor.clone()..anchor, new_text));
4194 } else {
4195 let edit_start = snapshot.anchor_after(range.start);
4196 let edit_end = snapshot.anchor_before(range.end);
4197 edits.push((edit_start..edit_end, new_text));
4198 }
4199 }
4200
4201 Ok(edits)
4202 })
4203 }
4204
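    // Look up the snapshot that was current at the given LSP document version, pruning
    // snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. When no version is
    // given, fall back to the buffer's current text snapshot.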
4205 fn buffer_snapshot_for_lsp_version(
4206 &mut self,
4207 buffer: &ModelHandle<Buffer>,
4208 version: Option<i32>,
4209 cx: &AppContext,
4210 ) -> Result<TextBufferSnapshot> {
4211 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4212
4213 if let Some(version) = version {
4214 let buffer_id = buffer.read(cx).remote_id();
4215 let snapshots = self
4216 .buffer_snapshots
4217 .get_mut(&buffer_id)
4218 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4219 let mut found_snapshot = None;
4220 snapshots.retain(|(snapshot_version, snapshot)| {
4221 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4222 false
4223 } else {
4224 if *snapshot_version == version {
4225 found_snapshot = Some(snapshot.clone());
4226 }
4227 true
4228 }
4229 });
4230
4231 found_snapshot.ok_or_else(|| {
4232 anyhow!(
4233 "snapshot not found for buffer {} at version {}",
4234 buffer_id,
4235 version
4236 )
4237 })
4238 } else {
4239 Ok((buffer.read(cx)).text_snapshot())
4240 }
4241 }
4242
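    // Find the running language server for a buffer's worktree and language, if any.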
4243 fn language_server_for_buffer(
4244 &self,
4245 buffer: &Buffer,
4246 cx: &AppContext,
4247 ) -> Option<&Arc<LanguageServer>> {
4248 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4249 let worktree_id = file.worktree_id(cx);
4250 self.language_servers.get(&(worktree_id, language.name()))
4251 } else {
4252 None
4253 }
4254 }
4255}
4256
4257impl WorktreeHandle {
4258 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4259 match self {
4260 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4261 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4262 }
4263 }
4264}
4265
4266impl OpenBuffer {
4267 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4268 match self {
4269 OpenBuffer::Strong(handle) => Some(handle.clone()),
4270 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4271 OpenBuffer::Loading(_) => None,
4272 }
4273 }
4274}
4275
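// Adapter that exposes a worktree snapshot to the fuzzy matcher via the
// `PathMatchCandidateSet` trait; used by `Project::match_paths` above.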
4276struct CandidateSet {
4277 snapshot: Snapshot,
4278 include_ignored: bool,
4279 include_root_name: bool,
4280}
4281
4282impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4283 type Candidates = CandidateSetIter<'a>;
4284
4285 fn id(&self) -> usize {
4286 self.snapshot.id().to_usize()
4287 }
4288
4289 fn len(&self) -> usize {
4290 if self.include_ignored {
4291 self.snapshot.file_count()
4292 } else {
4293 self.snapshot.visible_file_count()
4294 }
4295 }
4296
4297 fn prefix(&self) -> Arc<str> {
4298 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4299 self.snapshot.root_name().into()
4300 } else if self.include_root_name {
4301 format!("{}/", self.snapshot.root_name()).into()
4302 } else {
4303 "".into()
4304 }
4305 }
4306
4307 fn candidates(&'a self, start: usize) -> Self::Candidates {
4308 CandidateSetIter {
4309 traversal: self.snapshot.files(self.include_ignored, start),
4310 }
4311 }
4312}
4313
4314struct CandidateSetIter<'a> {
4315 traversal: Traversal<'a>,
4316}
4317
4318impl<'a> Iterator for CandidateSetIter<'a> {
4319 type Item = PathMatchCandidate<'a>;
4320
4321 fn next(&mut self) -> Option<Self::Item> {
4322 self.traversal.next().map(|entry| {
4323 if let EntryKind::File(char_bag) = entry.kind {
4324 PathMatchCandidate {
4325 path: &entry.path,
4326 char_bag,
4327 }
4328 } else {
4329 unreachable!()
4330 }
4331 })
4332 }
4333}
4334
4335impl Entity for Project {
4336 type Event = Event;
4337
4338 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4339 match &self.client_state {
4340 ProjectClientState::Local { remote_id_rx, .. } => {
4341 if let Some(project_id) = *remote_id_rx.borrow() {
4342 self.client
4343 .send(proto::UnregisterProject { project_id })
4344 .log_err();
4345 }
4346 }
4347 ProjectClientState::Remote { remote_id, .. } => {
4348 self.client
4349 .send(proto::LeaveProject {
4350 project_id: *remote_id,
4351 })
4352 .log_err();
4353 }
4354 }
4355 }
4356
4357 fn app_will_quit(
4358 &mut self,
4359 _: &mut MutableAppContext,
4360 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4361 let shutdown_futures = self
4362 .language_servers
4363 .drain()
4364 .filter_map(|(_, server)| server.shutdown())
4365 .collect::<Vec<_>>();
4366 Some(
4367 async move {
4368 futures::future::join_all(shutdown_futures).await;
4369 }
4370 .boxed(),
4371 )
4372 }
4373}
4374
4375impl Collaborator {
4376 fn from_proto(
4377 message: proto::Collaborator,
4378 user_store: &ModelHandle<UserStore>,
4379 cx: &mut AsyncAppContext,
4380 ) -> impl Future<Output = Result<Self>> {
4381 let user = user_store.update(cx, |user_store, cx| {
4382 user_store.fetch_user(message.user_id, cx)
4383 });
4384
4385 async move {
4386 Ok(Self {
4387 peer_id: PeerId(message.peer_id),
4388 user: user.await?,
4389 replica_id: message.replica_id as ReplicaId,
4390 })
4391 }
4392 }
4393}
4394
4395impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4396 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4397 Self {
4398 worktree_id,
4399 path: path.as_ref().into(),
4400 }
4401 }
4402}
4403
4404impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4405 fn from(options: lsp::CreateFileOptions) -> Self {
4406 Self {
4407 overwrite: options.overwrite.unwrap_or(false),
4408 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4409 }
4410 }
4411}
4412
4413impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4414 fn from(options: lsp::RenameFileOptions) -> Self {
4415 Self {
4416 overwrite: options.overwrite.unwrap_or(false),
4417 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4418 }
4419 }
4420}
4421
4422impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4423 fn from(options: lsp::DeleteFileOptions) -> Self {
4424 Self {
4425 recursive: options.recursive.unwrap_or(false),
4426 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4427 }
4428 }
4429}
4430
4431fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4432 proto::Symbol {
4433 source_worktree_id: symbol.source_worktree_id.to_proto(),
4434 worktree_id: symbol.worktree_id.to_proto(),
4435 language_name: symbol.language_name.clone(),
4436 name: symbol.name.clone(),
4437 kind: unsafe { mem::transmute(symbol.kind) },
4438 path: symbol.path.to_string_lossy().to_string(),
4439 start: Some(proto::Point {
4440 row: symbol.range.start.row,
4441 column: symbol.range.start.column,
4442 }),
4443 end: Some(proto::Point {
4444 row: symbol.range.end.row,
4445 column: symbol.range.end.column,
4446 }),
4447 signature: symbol.signature.to_vec(),
4448 }
4449}
4450
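// Compute `path` relative to `base`, inserting `..` components where the two diverge.
// For example, relativize_path("/a/b", "/a/c/d") yields "../c/d".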
4451fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4452 let mut path_components = path.components();
4453 let mut base_components = base.components();
4454 let mut components: Vec<Component> = Vec::new();
4455 loop {
4456 match (path_components.next(), base_components.next()) {
4457 (None, None) => break,
4458 (Some(a), None) => {
4459 components.push(a);
4460 components.extend(path_components.by_ref());
4461 break;
4462 }
4463 (None, _) => components.push(Component::ParentDir),
4464 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4465 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4466 (Some(a), Some(_)) => {
4467 components.push(Component::ParentDir);
4468 for _ in base_components {
4469 components.push(Component::ParentDir);
4470 }
4471 components.push(a);
4472 components.extend(path_components.by_ref());
4473 break;
4474 }
4475 }
4476 }
4477 components.iter().map(|c| c.as_os_str()).collect()
4478}
4479
4480#[cfg(test)]
4481mod tests {
4482 use super::{Event, *};
4483 use fs::RealFs;
4484 use futures::StreamExt;
4485 use gpui::test::subscribe;
4486 use language::{
4487 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4488 ToPoint,
4489 };
4490 use lsp::Url;
4491 use serde_json::json;
4492 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4493 use unindent::Unindent as _;
4494 use util::test::temp_tree;
4495 use worktree::WorktreeHandle as _;
4496
4497 #[gpui::test]
4498 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4499 let dir = temp_tree(json!({
4500 "root": {
4501 "apple": "",
4502 "banana": {
4503 "carrot": {
4504 "date": "",
4505 "endive": "",
4506 }
4507 },
4508 "fennel": {
4509 "grape": "",
4510 }
4511 }
4512 }));
4513
4514 let root_link_path = dir.path().join("root_link");
4515 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4516 unix::fs::symlink(
4517 &dir.path().join("root/fennel"),
4518 &dir.path().join("root/finnochio"),
4519 )
4520 .unwrap();
4521
4522 let project = Project::test(Arc::new(RealFs), cx);
4523
4524 let (tree, _) = project
4525 .update(cx, |project, cx| {
4526 project.find_or_create_local_worktree(&root_link_path, true, cx)
4527 })
4528 .await
4529 .unwrap();
4530
4531 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4532 .await;
4533 cx.read(|cx| {
4534 let tree = tree.read(cx);
4535 assert_eq!(tree.file_count(), 5);
4536 assert_eq!(
4537 tree.inode_for_path("fennel/grape"),
4538 tree.inode_for_path("finnochio/grape")
4539 );
4540 });
4541
4542 let cancel_flag = Default::default();
4543 let results = project
4544 .read_with(cx, |project, cx| {
4545 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4546 })
4547 .await;
4548 assert_eq!(
4549 results
4550 .into_iter()
4551 .map(|result| result.path)
4552 .collect::<Vec<Arc<Path>>>(),
4553 vec![
4554 PathBuf::from("banana/carrot/date").into(),
4555 PathBuf::from("banana/carrot/endive").into(),
4556 ]
4557 );
4558 }
4559
4560 #[gpui::test]
4561 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4562 cx.foreground().forbid_parking();
4563
4564 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4565 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4566 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4567 completion_provider: Some(lsp::CompletionOptions {
4568 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4569 ..Default::default()
4570 }),
4571 ..Default::default()
4572 });
4573 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4574 completion_provider: Some(lsp::CompletionOptions {
4575 trigger_characters: Some(vec![":".to_string()]),
4576 ..Default::default()
4577 }),
4578 ..Default::default()
4579 });
4580
4581 let rust_language = Arc::new(Language::new(
4582 LanguageConfig {
4583 name: "Rust".into(),
4584 path_suffixes: vec!["rs".to_string()],
4585 language_server: Some(rust_lsp_config),
4586 ..Default::default()
4587 },
4588 Some(tree_sitter_rust::language()),
4589 ));
4590 let json_language = Arc::new(Language::new(
4591 LanguageConfig {
4592 name: "JSON".into(),
4593 path_suffixes: vec!["json".to_string()],
4594 language_server: Some(json_lsp_config),
4595 ..Default::default()
4596 },
4597 None,
4598 ));
4599
4600 let fs = FakeFs::new(cx.background());
4601 fs.insert_tree(
4602 "/the-root",
4603 json!({
4604 "test.rs": "const A: i32 = 1;",
4605 "test2.rs": "",
4606 "Cargo.toml": "a = 1",
4607 "package.json": "{\"a\": 1}",
4608 }),
4609 )
4610 .await;
4611
4612 let project = Project::test(fs, cx);
4613 project.update(cx, |project, _| {
4614 project.languages.add(rust_language);
4615 project.languages.add(json_language);
4616 });
4617
4618 let worktree_id = project
4619 .update(cx, |project, cx| {
4620 project.find_or_create_local_worktree("/the-root", true, cx)
4621 })
4622 .await
4623 .unwrap()
4624 .0
4625 .read_with(cx, |tree, _| tree.id());
4626
4627 // Open a buffer without an associated language server.
4628 let toml_buffer = project
4629 .update(cx, |project, cx| {
4630 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4631 })
4632 .await
4633 .unwrap();
4634
4635 // Open a buffer with an associated language server.
4636 let rust_buffer = project
4637 .update(cx, |project, cx| {
4638 project.open_buffer((worktree_id, "test.rs"), cx)
4639 })
4640 .await
4641 .unwrap();
4642
4643 // A server is started up, and it is notified about Rust files.
4644 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4645 assert_eq!(
4646 fake_rust_server
4647 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4648 .await
4649 .text_document,
4650 lsp::TextDocumentItem {
4651 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4652 version: 0,
4653 text: "const A: i32 = 1;".to_string(),
4654 language_id: Default::default()
4655 }
4656 );
4657
4658 // The buffer is configured based on the language server's capabilities.
4659 rust_buffer.read_with(cx, |buffer, _| {
4660 assert_eq!(
4661 buffer.completion_triggers(),
4662 &[".".to_string(), "::".to_string()]
4663 );
4664 });
4665 toml_buffer.read_with(cx, |buffer, _| {
4666 assert!(buffer.completion_triggers().is_empty());
4667 });
4668
4669 // Edit a buffer. The changes are reported to the language server.
4670 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4671 assert_eq!(
4672 fake_rust_server
4673 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4674 .await
4675 .text_document,
4676 lsp::VersionedTextDocumentIdentifier::new(
4677 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4678 1
4679 )
4680 );
4681
4682 // Open a third buffer with a different associated language server.
4683 let json_buffer = project
4684 .update(cx, |project, cx| {
4685 project.open_buffer((worktree_id, "package.json"), cx)
4686 })
4687 .await
4688 .unwrap();
4689
4690        // Another language server is started up, and it is notified about the
4691        // newly opened buffer that matches its language.
4692 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4693 assert_eq!(
4694 fake_json_server
4695 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4696 .await
4697 .text_document,
4698 lsp::TextDocumentItem {
4699 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4700 version: 0,
4701 text: "{\"a\": 1}".to_string(),
4702 language_id: Default::default()
4703 }
4704 );
4705
4706 // This buffer is configured based on the second language server's
4707 // capabilities.
4708 json_buffer.read_with(cx, |buffer, _| {
4709 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4710 });
4711
4712 // When opening another buffer whose language server is already running,
4713 // it is also configured based on the existing language server's capabilities.
4714 let rust_buffer2 = project
4715 .update(cx, |project, cx| {
4716 project.open_buffer((worktree_id, "test2.rs"), cx)
4717 })
4718 .await
4719 .unwrap();
4720 rust_buffer2.read_with(cx, |buffer, _| {
4721 assert_eq!(
4722 buffer.completion_triggers(),
4723 &[".".to_string(), "::".to_string()]
4724 );
4725 });
4726
4727 // Changes are reported only to servers matching the buffer's language.
4728 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4729 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4730 assert_eq!(
4731 fake_rust_server
4732 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4733 .await
4734 .text_document,
4735 lsp::VersionedTextDocumentIdentifier::new(
4736 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4737 1
4738 )
4739 );
4740
4741 // Save notifications are reported to all servers.
4742 toml_buffer
4743 .update(cx, |buffer, cx| buffer.save(cx))
4744 .await
4745 .unwrap();
4746 assert_eq!(
4747 fake_rust_server
4748 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4749 .await
4750 .text_document,
4751 lsp::TextDocumentIdentifier::new(
4752 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4753 )
4754 );
4755 assert_eq!(
4756 fake_json_server
4757 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4758 .await
4759 .text_document,
4760 lsp::TextDocumentIdentifier::new(
4761 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4762 )
4763 );
4764
4765 // Close notifications are reported only to servers matching the buffer's language.
4766 cx.update(|_| drop(json_buffer));
4767 let close_message = lsp::DidCloseTextDocumentParams {
4768 text_document: lsp::TextDocumentIdentifier::new(
4769 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4770 ),
4771 };
4772 assert_eq!(
4773 fake_json_server
4774 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4775 .await,
4776 close_message,
4777 );
4778 }
4779
4780 #[gpui::test]
4781 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4782 cx.foreground().forbid_parking();
4783
4784 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4785 let progress_token = language_server_config
4786 .disk_based_diagnostics_progress_token
4787 .clone()
4788 .unwrap();
4789
4790 let language = Arc::new(Language::new(
4791 LanguageConfig {
4792 name: "Rust".into(),
4793 path_suffixes: vec!["rs".to_string()],
4794 language_server: Some(language_server_config),
4795 ..Default::default()
4796 },
4797 Some(tree_sitter_rust::language()),
4798 ));
4799
4800 let fs = FakeFs::new(cx.background());
4801 fs.insert_tree(
4802 "/dir",
4803 json!({
4804 "a.rs": "fn a() { A }",
4805 "b.rs": "const y: i32 = 1",
4806 }),
4807 )
4808 .await;
4809
4810 let project = Project::test(fs, cx);
4811 project.update(cx, |project, _| project.languages.add(language));
4812
4813 let (tree, _) = project
4814 .update(cx, |project, cx| {
4815 project.find_or_create_local_worktree("/dir", true, cx)
4816 })
4817 .await
4818 .unwrap();
4819 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4820
4821 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4822 .await;
4823
4824        // Cause the worktree to start the fake language server.
4825 let _buffer = project
4826 .update(cx, |project, cx| {
4827 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4828 })
4829 .await
4830 .unwrap();
4831
4832 let mut events = subscribe(&project, cx);
4833
4834 let mut fake_server = fake_servers.next().await.unwrap();
4835 fake_server.start_progress(&progress_token).await;
4836 assert_eq!(
4837 events.next().await.unwrap(),
4838 Event::DiskBasedDiagnosticsStarted
4839 );
4840
4841 fake_server.start_progress(&progress_token).await;
4842 fake_server.end_progress(&progress_token).await;
4843 fake_server.start_progress(&progress_token).await;
4844
4845 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4846 lsp::PublishDiagnosticsParams {
4847 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4848 version: None,
4849 diagnostics: vec![lsp::Diagnostic {
4850 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4851 severity: Some(lsp::DiagnosticSeverity::ERROR),
4852 message: "undefined variable 'A'".to_string(),
4853 ..Default::default()
4854 }],
4855 },
4856 );
4857 assert_eq!(
4858 events.next().await.unwrap(),
4859 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4860 );
4861
4862 fake_server.end_progress(&progress_token).await;
4863 fake_server.end_progress(&progress_token).await;
4864 assert_eq!(
4865 events.next().await.unwrap(),
4866 Event::DiskBasedDiagnosticsUpdated
4867 );
4868 assert_eq!(
4869 events.next().await.unwrap(),
4870 Event::DiskBasedDiagnosticsFinished
4871 );
4872
4873 let buffer = project
4874 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4875 .await
4876 .unwrap();
4877
4878 buffer.read_with(cx, |buffer, _| {
4879 let snapshot = buffer.snapshot();
4880 let diagnostics = snapshot
4881 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4882 .collect::<Vec<_>>();
4883 assert_eq!(
4884 diagnostics,
4885 &[DiagnosticEntry {
4886 range: Point::new(0, 9)..Point::new(0, 10),
4887 diagnostic: Diagnostic {
4888 severity: lsp::DiagnosticSeverity::ERROR,
4889 message: "undefined variable 'A'".to_string(),
4890 group_id: 0,
4891 is_primary: true,
4892 ..Default::default()
4893 }
4894 }]
4895 )
4896 });
4897 }
4898
4899 #[gpui::test]
4900 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4901 cx.foreground().forbid_parking();
4902
4903 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4904 lsp_config
4905 .disk_based_diagnostic_sources
4906 .insert("disk".to_string());
4907 let language = Arc::new(Language::new(
4908 LanguageConfig {
4909 name: "Rust".into(),
4910 path_suffixes: vec!["rs".to_string()],
4911 language_server: Some(lsp_config),
4912 ..Default::default()
4913 },
4914 Some(tree_sitter_rust::language()),
4915 ));
4916
4917 let text = "
4918 fn a() { A }
4919 fn b() { BB }
4920 fn c() { CCC }
4921 "
4922 .unindent();
4923
4924 let fs = FakeFs::new(cx.background());
4925 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4926
4927 let project = Project::test(fs, cx);
4928 project.update(cx, |project, _| project.languages.add(language));
4929
4930 let worktree_id = project
4931 .update(cx, |project, cx| {
4932 project.find_or_create_local_worktree("/dir", true, cx)
4933 })
4934 .await
4935 .unwrap()
4936 .0
4937 .read_with(cx, |tree, _| tree.id());
4938
4939 let buffer = project
4940 .update(cx, |project, cx| {
4941 project.open_buffer((worktree_id, "a.rs"), cx)
4942 })
4943 .await
4944 .unwrap();
4945
4946 let mut fake_server = fake_servers.next().await.unwrap();
4947 let open_notification = fake_server
4948 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4949 .await;
4950
4951 // Edit the buffer, moving the content down
4952 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4953 let change_notification_1 = fake_server
4954 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4955 .await;
4956 assert!(
4957 change_notification_1.text_document.version > open_notification.text_document.version
4958 );
4959
4960 // Report some diagnostics for the initial version of the buffer
4961 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4962 lsp::PublishDiagnosticsParams {
4963 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4964 version: Some(open_notification.text_document.version),
4965 diagnostics: vec![
4966 lsp::Diagnostic {
4967 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4968 severity: Some(DiagnosticSeverity::ERROR),
4969 message: "undefined variable 'A'".to_string(),
4970 source: Some("disk".to_string()),
4971 ..Default::default()
4972 },
4973 lsp::Diagnostic {
4974 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4975 severity: Some(DiagnosticSeverity::ERROR),
4976 message: "undefined variable 'BB'".to_string(),
4977 source: Some("disk".to_string()),
4978 ..Default::default()
4979 },
4980 lsp::Diagnostic {
4981 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4982 severity: Some(DiagnosticSeverity::ERROR),
4983 source: Some("disk".to_string()),
4984 message: "undefined variable 'CCC'".to_string(),
4985 ..Default::default()
4986 },
4987 ],
4988 },
4989 );
4990
4991 // The diagnostics have moved down since they were created.
4992 buffer.next_notification(cx).await;
4993 buffer.read_with(cx, |buffer, _| {
4994 assert_eq!(
4995 buffer
4996 .snapshot()
4997 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
4998 .collect::<Vec<_>>(),
4999 &[
5000 DiagnosticEntry {
5001 range: Point::new(3, 9)..Point::new(3, 11),
5002 diagnostic: Diagnostic {
5003 severity: DiagnosticSeverity::ERROR,
5004 message: "undefined variable 'BB'".to_string(),
5005 is_disk_based: true,
5006 group_id: 1,
5007 is_primary: true,
5008 ..Default::default()
5009 },
5010 },
5011 DiagnosticEntry {
5012 range: Point::new(4, 9)..Point::new(4, 12),
5013 diagnostic: Diagnostic {
5014 severity: DiagnosticSeverity::ERROR,
5015 message: "undefined variable 'CCC'".to_string(),
5016 is_disk_based: true,
5017 group_id: 2,
5018 is_primary: true,
5019 ..Default::default()
5020 }
5021 }
5022 ]
5023 );
5024 assert_eq!(
5025 chunks_with_diagnostics(buffer, 0..buffer.len()),
5026 [
5027 ("\n\nfn a() { ".to_string(), None),
5028 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5029 (" }\nfn b() { ".to_string(), None),
5030 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5031 (" }\nfn c() { ".to_string(), None),
5032 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5033 (" }\n".to_string(), None),
5034 ]
5035 );
5036 assert_eq!(
5037 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5038 [
5039 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5040 (" }\nfn c() { ".to_string(), None),
5041 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5042 ]
5043 );
5044 });
5045
5046 // Ensure overlapping diagnostics are highlighted correctly.
5047 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5048 lsp::PublishDiagnosticsParams {
5049 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5050 version: Some(open_notification.text_document.version),
5051 diagnostics: vec![
5052 lsp::Diagnostic {
5053 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5054 severity: Some(DiagnosticSeverity::ERROR),
5055 message: "undefined variable 'A'".to_string(),
5056 source: Some("disk".to_string()),
5057 ..Default::default()
5058 },
5059 lsp::Diagnostic {
5060 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5061 severity: Some(DiagnosticSeverity::WARNING),
5062 message: "unreachable statement".to_string(),
5063 source: Some("disk".to_string()),
5064 ..Default::default()
5065 },
5066 ],
5067 },
5068 );
5069
5070 buffer.next_notification(cx).await;
5071 buffer.read_with(cx, |buffer, _| {
5072 assert_eq!(
5073 buffer
5074 .snapshot()
5075 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5076 .collect::<Vec<_>>(),
5077 &[
5078 DiagnosticEntry {
5079 range: Point::new(2, 9)..Point::new(2, 12),
5080 diagnostic: Diagnostic {
5081 severity: DiagnosticSeverity::WARNING,
5082 message: "unreachable statement".to_string(),
5083 is_disk_based: true,
5084 group_id: 1,
5085 is_primary: true,
5086 ..Default::default()
5087 }
5088 },
5089 DiagnosticEntry {
5090 range: Point::new(2, 9)..Point::new(2, 10),
5091 diagnostic: Diagnostic {
5092 severity: DiagnosticSeverity::ERROR,
5093 message: "undefined variable 'A'".to_string(),
5094 is_disk_based: true,
5095 group_id: 0,
5096 is_primary: true,
5097 ..Default::default()
5098 },
5099 }
5100 ]
5101 );
5102 assert_eq!(
5103 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5104 [
5105 ("fn a() { ".to_string(), None),
5106 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5107 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5108 ("\n".to_string(), None),
5109 ]
5110 );
5111 assert_eq!(
5112 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5113 [
5114 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5115 ("\n".to_string(), None),
5116 ]
5117 );
5118 });
5119
5120 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5121 // changes since the last save.
5122 buffer.update(cx, |buffer, cx| {
5123 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5124 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5125 });
5126 let change_notification_2 =
5127 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
5128 assert!(
5129 change_notification_2.await.text_document.version
5130 > change_notification_1.text_document.version
5131 );
5132
5133 // Handle out-of-order diagnostics
5134 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5135 lsp::PublishDiagnosticsParams {
5136 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5137 version: Some(open_notification.text_document.version),
5138 diagnostics: vec![
5139 lsp::Diagnostic {
5140 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5141 severity: Some(DiagnosticSeverity::ERROR),
5142 message: "undefined variable 'BB'".to_string(),
5143 source: Some("disk".to_string()),
5144 ..Default::default()
5145 },
5146 lsp::Diagnostic {
5147 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5148 severity: Some(DiagnosticSeverity::WARNING),
5149 message: "undefined variable 'A'".to_string(),
5150 source: Some("disk".to_string()),
5151 ..Default::default()
5152 },
5153 ],
5154 },
5155 );
5156
5157 buffer.next_notification(cx).await;
5158 buffer.read_with(cx, |buffer, _| {
5159 assert_eq!(
5160 buffer
5161 .snapshot()
5162 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5163 .collect::<Vec<_>>(),
5164 &[
5165 DiagnosticEntry {
5166 range: Point::new(2, 21)..Point::new(2, 22),
5167 diagnostic: Diagnostic {
5168 severity: DiagnosticSeverity::WARNING,
5169 message: "undefined variable 'A'".to_string(),
5170 is_disk_based: true,
5171 group_id: 1,
5172 is_primary: true,
5173 ..Default::default()
5174 }
5175 },
5176 DiagnosticEntry {
5177 range: Point::new(3, 9)..Point::new(3, 11),
5178 diagnostic: Diagnostic {
5179 severity: DiagnosticSeverity::ERROR,
5180 message: "undefined variable 'BB'".to_string(),
5181 is_disk_based: true,
5182 group_id: 0,
5183 is_primary: true,
5184 ..Default::default()
5185 },
5186 }
5187 ]
5188 );
5189 });
5190 }
5191
5192 #[gpui::test]
5193 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5194 cx.foreground().forbid_parking();
5195
5196 let text = concat!(
5197 "let one = ;\n", //
5198 "let two = \n",
5199 "let three = 3;\n",
5200 );
5201
5202 let fs = FakeFs::new(cx.background());
5203 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5204
5205 let project = Project::test(fs, cx);
5206 let worktree_id = project
5207 .update(cx, |project, cx| {
5208 project.find_or_create_local_worktree("/dir", true, cx)
5209 })
5210 .await
5211 .unwrap()
5212 .0
5213 .read_with(cx, |tree, _| tree.id());
5214
5215 let buffer = project
5216 .update(cx, |project, cx| {
5217 project.open_buffer((worktree_id, "a.rs"), cx)
5218 })
5219 .await
5220 .unwrap();
5221
5222 project.update(cx, |project, cx| {
5223 project
5224 .update_buffer_diagnostics(
5225 &buffer,
5226 vec![
5227 DiagnosticEntry {
5228 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5229 diagnostic: Diagnostic {
5230 severity: DiagnosticSeverity::ERROR,
5231 message: "syntax error 1".to_string(),
5232 ..Default::default()
5233 },
5234 },
5235 DiagnosticEntry {
5236 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5237 diagnostic: Diagnostic {
5238 severity: DiagnosticSeverity::ERROR,
5239 message: "syntax error 2".to_string(),
5240 ..Default::default()
5241 },
5242 },
5243 ],
5244 None,
5245 cx,
5246 )
5247 .unwrap();
5248 });
5249
5250 // An empty range is extended forward to include the following character.
5251 // At the end of a line, an empty range is extended backward to include
5252 // the preceding character.
5253 buffer.read_with(cx, |buffer, _| {
5254 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5255 assert_eq!(
5256 chunks
5257 .iter()
5258 .map(|(s, d)| (s.as_str(), *d))
5259 .collect::<Vec<_>>(),
5260 &[
5261 ("let one = ", None),
5262 (";", Some(DiagnosticSeverity::ERROR)),
5263 ("\nlet two =", None),
5264 (" ", Some(DiagnosticSeverity::ERROR)),
5265 ("\nlet three = 3;\n", None)
5266 ]
5267 );
5268 });
5269 }
5270
5271 #[gpui::test]
5272 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5273 cx.foreground().forbid_parking();
5274
5275 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5276 let language = Arc::new(Language::new(
5277 LanguageConfig {
5278 name: "Rust".into(),
5279 path_suffixes: vec!["rs".to_string()],
5280 language_server: Some(lsp_config),
5281 ..Default::default()
5282 },
5283 Some(tree_sitter_rust::language()),
5284 ));
5285
5286 let text = "
5287 fn a() {
5288 f1();
5289 }
5290 fn b() {
5291 f2();
5292 }
5293 fn c() {
5294 f3();
5295 }
5296 "
5297 .unindent();
5298
5299 let fs = FakeFs::new(cx.background());
5300 fs.insert_tree(
5301 "/dir",
5302 json!({
5303 "a.rs": text.clone(),
5304 }),
5305 )
5306 .await;
5307
5308 let project = Project::test(fs, cx);
5309 project.update(cx, |project, _| project.languages.add(language));
5310
5311 let worktree_id = project
5312 .update(cx, |project, cx| {
5313 project.find_or_create_local_worktree("/dir", true, cx)
5314 })
5315 .await
5316 .unwrap()
5317 .0
5318 .read_with(cx, |tree, _| tree.id());
5319
5320 let buffer = project
5321 .update(cx, |project, cx| {
5322 project.open_buffer((worktree_id, "a.rs"), cx)
5323 })
5324 .await
5325 .unwrap();
5326
5327 let mut fake_server = fake_servers.next().await.unwrap();
5328 let lsp_document_version = fake_server
5329 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5330 .await
5331 .text_document
5332 .version;
5333
5334 // Simulate editing the buffer after the language server computes some edits.
5335 buffer.update(cx, |buffer, cx| {
5336 buffer.edit(
5337 [Point::new(0, 0)..Point::new(0, 0)],
5338 "// above first function\n",
5339 cx,
5340 );
5341 buffer.edit(
5342 [Point::new(2, 0)..Point::new(2, 0)],
5343 " // inside first function\n",
5344 cx,
5345 );
5346 buffer.edit(
5347 [Point::new(6, 4)..Point::new(6, 4)],
5348 "// inside second function ",
5349 cx,
5350 );
5351
5352 assert_eq!(
5353 buffer.text(),
5354 "
5355 // above first function
5356 fn a() {
5357 // inside first function
5358 f1();
5359 }
5360 fn b() {
5361 // inside second function f2();
5362 }
5363 fn c() {
5364 f3();
5365 }
5366 "
5367 .unindent()
5368 );
5369 });
5370
5371 let edits = project
5372 .update(cx, |project, cx| {
5373 project.edits_from_lsp(
5374 &buffer,
5375 vec![
5376 // replace body of first function
5377 lsp::TextEdit {
5378 range: lsp::Range::new(
5379 lsp::Position::new(0, 0),
5380 lsp::Position::new(3, 0),
5381 ),
5382 new_text: "
5383 fn a() {
5384 f10();
5385 }
5386 "
5387 .unindent(),
5388 },
5389 // edit inside second function
5390 lsp::TextEdit {
5391 range: lsp::Range::new(
5392 lsp::Position::new(4, 6),
5393 lsp::Position::new(4, 6),
5394 ),
5395 new_text: "00".into(),
5396 },
5397 // edit inside third function via two distinct edits
5398 lsp::TextEdit {
5399 range: lsp::Range::new(
5400 lsp::Position::new(7, 5),
5401 lsp::Position::new(7, 5),
5402 ),
5403 new_text: "4000".into(),
5404 },
5405 lsp::TextEdit {
5406 range: lsp::Range::new(
5407 lsp::Position::new(7, 5),
5408 lsp::Position::new(7, 6),
5409 ),
5410 new_text: "".into(),
5411 },
5412 ],
5413 Some(lsp_document_version),
5414 cx,
5415 )
5416 })
5417 .await
5418 .unwrap();
5419
5420 buffer.update(cx, |buffer, cx| {
5421 for (range, new_text) in edits {
5422 buffer.edit([range], new_text, cx);
5423 }
5424 assert_eq!(
5425 buffer.text(),
5426 "
5427 // above first function
5428 fn a() {
5429 // inside first function
5430 f10();
5431 }
5432 fn b() {
5433 // inside second function f200();
5434 }
5435 fn c() {
5436 f4000();
5437 }
5438 "
5439 .unindent()
5440 );
5441 });
5442 }
5443
5444 #[gpui::test]
5445 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5446 cx.foreground().forbid_parking();
5447
5448 let text = "
5449 use a::b;
5450 use a::c;
5451
5452 fn f() {
5453 b();
5454 c();
5455 }
5456 "
5457 .unindent();
5458
5459 let fs = FakeFs::new(cx.background());
5460 fs.insert_tree(
5461 "/dir",
5462 json!({
5463 "a.rs": text.clone(),
5464 }),
5465 )
5466 .await;
5467
5468 let project = Project::test(fs, cx);
5469 let worktree_id = project
5470 .update(cx, |project, cx| {
5471 project.find_or_create_local_worktree("/dir", true, cx)
5472 })
5473 .await
5474 .unwrap()
5475 .0
5476 .read_with(cx, |tree, _| tree.id());
5477
5478 let buffer = project
5479 .update(cx, |project, cx| {
5480 project.open_buffer((worktree_id, "a.rs"), cx)
5481 })
5482 .await
5483 .unwrap();
5484
5485 // Simulate the language server sending us a small edit in the form of a very large diff.
5486 // Rust-analyzer does this when performing a merge-imports code action.
5487 let edits = project
5488 .update(cx, |project, cx| {
5489 project.edits_from_lsp(
5490 &buffer,
5491 [
5492 // Replace the first use statement without editing the semicolon.
5493 lsp::TextEdit {
5494 range: lsp::Range::new(
5495 lsp::Position::new(0, 4),
5496 lsp::Position::new(0, 8),
5497 ),
5498 new_text: "a::{b, c}".into(),
5499 },
5500 // Reinsert the remainder of the file between the semicolon and the final
5501 // newline of the file.
5502 lsp::TextEdit {
5503 range: lsp::Range::new(
5504 lsp::Position::new(0, 9),
5505 lsp::Position::new(0, 9),
5506 ),
5507 new_text: "\n\n".into(),
5508 },
5509 lsp::TextEdit {
5510 range: lsp::Range::new(
5511 lsp::Position::new(0, 9),
5512 lsp::Position::new(0, 9),
5513 ),
5514 new_text: "
5515 fn f() {
5516 b();
5517 c();
5518 }"
5519 .unindent(),
5520 },
5521 // Delete everything after the first newline of the file.
5522 lsp::TextEdit {
5523 range: lsp::Range::new(
5524 lsp::Position::new(1, 0),
5525 lsp::Position::new(7, 0),
5526 ),
5527 new_text: "".into(),
5528 },
5529 ],
5530 None,
5531 cx,
5532 )
5533 })
5534 .await
5535 .unwrap();
5536
5537 buffer.update(cx, |buffer, cx| {
5538 let edits = edits
5539 .into_iter()
5540 .map(|(range, text)| {
5541 (
5542 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5543 text,
5544 )
5545 })
5546 .collect::<Vec<_>>();
5547
5548 assert_eq!(
5549 edits,
5550 [
5551 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5552 (Point::new(1, 0)..Point::new(2, 0), "".into())
5553 ]
5554 );
5555
5556 for (range, new_text) in edits {
5557 buffer.edit([range], new_text, cx);
5558 }
5559 assert_eq!(
5560 buffer.text(),
5561 "
5562 use a::{b, c};
5563
5564 fn f() {
5565 b();
5566 c();
5567 }
5568 "
5569 .unindent()
5570 );
5571 });
5572 }
5573
5574 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5575 buffer: &Buffer,
5576 range: Range<T>,
5577 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5578 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5579 for chunk in buffer.snapshot().chunks(range, true) {
5580 if chunks.last().map_or(false, |prev_chunk| {
5581 prev_chunk.1 == chunk.diagnostic_severity
5582 }) {
5583 chunks.last_mut().unwrap().0.push_str(chunk.text);
5584 } else {
5585 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5586 }
5587 }
5588 chunks
5589 }
5590
5591 #[gpui::test]
5592 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5593 let dir = temp_tree(json!({
5594 "root": {
5595 "dir1": {},
5596 "dir2": {
5597 "dir3": {}
5598 }
5599 }
5600 }));
5601
5602 let project = Project::test(Arc::new(RealFs), cx);
5603 let (tree, _) = project
5604 .update(cx, |project, cx| {
5605 project.find_or_create_local_worktree(&dir.path(), true, cx)
5606 })
5607 .await
5608 .unwrap();
5609
5610 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5611 .await;
5612
5613 let cancel_flag = Default::default();
5614 let results = project
5615 .read_with(cx, |project, cx| {
5616 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5617 })
5618 .await;
5619
5620 assert!(results.is_empty());
5621 }
5622
5623 #[gpui::test]
5624 async fn test_definition(cx: &mut gpui::TestAppContext) {
5625 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5626 let language = Arc::new(Language::new(
5627 LanguageConfig {
5628 name: "Rust".into(),
5629 path_suffixes: vec!["rs".to_string()],
5630 language_server: Some(language_server_config),
5631 ..Default::default()
5632 },
5633 Some(tree_sitter_rust::language()),
5634 ));
5635
5636 let fs = FakeFs::new(cx.background());
5637 fs.insert_tree(
5638 "/dir",
5639 json!({
5640 "a.rs": "const fn a() { A }",
5641 "b.rs": "const y: i32 = crate::a()",
5642 }),
5643 )
5644 .await;
5645
5646 let project = Project::test(fs, cx);
5647        project.update(cx, |project, _| {
5648            project.languages.add(language);
5649        });
5650
5651 let (tree, _) = project
5652 .update(cx, |project, cx| {
5653 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5654 })
5655 .await
5656 .unwrap();
5657 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5658 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5659 .await;
5660
5661 let buffer = project
5662 .update(cx, |project, cx| {
5663 project.open_buffer(
5664 ProjectPath {
5665 worktree_id,
5666 path: Path::new("").into(),
5667 },
5668 cx,
5669 )
5670 })
5671 .await
5672 .unwrap();
5673
5674 let mut fake_server = fake_servers.next().await.unwrap();
5675 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5676 let params = params.text_document_position_params;
5677 assert_eq!(
5678 params.text_document.uri.to_file_path().unwrap(),
5679 Path::new("/dir/b.rs"),
5680 );
5681 assert_eq!(params.position, lsp::Position::new(0, 22));
5682
5683 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5684 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5685 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5686 )))
5687 });
5688
5689 let mut definitions = project
5690 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5691 .await
5692 .unwrap();
5693
5694 assert_eq!(definitions.len(), 1);
5695 let definition = definitions.pop().unwrap();
5696 cx.update(|cx| {
5697 let target_buffer = definition.buffer.read(cx);
5698 assert_eq!(
5699 target_buffer
5700 .file()
5701 .unwrap()
5702 .as_local()
5703 .unwrap()
5704 .abs_path(cx),
5705 Path::new("/dir/a.rs"),
5706 );
5707 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5708 assert_eq!(
5709 list_worktrees(&project, cx),
5710 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5711 );
5712
5713 drop(definition);
5714 });
5715 cx.read(|cx| {
5716 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5717 });
5718
5719 fn list_worktrees<'a>(
5720 project: &'a ModelHandle<Project>,
5721 cx: &'a AppContext,
5722 ) -> Vec<(&'a Path, bool)> {
5723 project
5724 .read(cx)
5725 .worktrees(cx)
5726 .map(|worktree| {
5727 let worktree = worktree.read(cx);
5728 (
5729 worktree.as_local().unwrap().abs_path().as_ref(),
5730 worktree.is_visible(),
5731 )
5732 })
5733 .collect::<Vec<_>>()
5734 }
5735 }
5736
5737 #[gpui::test]
5738 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5739 let fs = FakeFs::new(cx.background());
5740 fs.insert_tree(
5741 "/dir",
5742 json!({
5743 "file1": "the old contents",
5744 }),
5745 )
5746 .await;
5747
5748 let project = Project::test(fs.clone(), cx);
5749 let worktree_id = project
5750 .update(cx, |p, cx| {
5751 p.find_or_create_local_worktree("/dir", true, cx)
5752 })
5753 .await
5754 .unwrap()
5755 .0
5756 .read_with(cx, |tree, _| tree.id());
5757
5758 let buffer = project
5759 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5760 .await
5761 .unwrap();
5762 buffer
5763 .update(cx, |buffer, cx| {
5764 assert_eq!(buffer.text(), "the old contents");
5765 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5766 buffer.save(cx)
5767 })
5768 .await
5769 .unwrap();
5770
5771 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5772 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5773 }
5774
5775 #[gpui::test]
5776 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5777 let fs = FakeFs::new(cx.background());
5778 fs.insert_tree(
5779 "/dir",
5780 json!({
5781 "file1": "the old contents",
5782 }),
5783 )
5784 .await;
5785
5786 let project = Project::test(fs.clone(), cx);
5787 let worktree_id = project
5788 .update(cx, |p, cx| {
5789 p.find_or_create_local_worktree("/dir/file1", true, cx)
5790 })
5791 .await
5792 .unwrap()
5793 .0
5794 .read_with(cx, |tree, _| tree.id());
5795
5796 let buffer = project
5797 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5798 .await
5799 .unwrap();
5800 buffer
5801 .update(cx, |buffer, cx| {
5802 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5803 buffer.save(cx)
5804 })
5805 .await
5806 .unwrap();
5807
5808 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5809 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5810 }
5811
5812 #[gpui::test]
5813 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5814 let fs = FakeFs::new(cx.background());
5815 fs.insert_tree("/dir", json!({})).await;
5816
5817 let project = Project::test(fs.clone(), cx);
5818 let (worktree, _) = project
5819 .update(cx, |project, cx| {
5820 project.find_or_create_local_worktree("/dir", true, cx)
5821 })
5822 .await
5823 .unwrap();
5824 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5825
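        // Create an untitled buffer and edit it so that it becomes dirty.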
5826 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5827 buffer.update(cx, |buffer, cx| {
5828 buffer.edit([0..0], "abc", cx);
5829 assert!(buffer.is_dirty());
5830 assert!(!buffer.has_conflict());
5831 });
5832 project
5833 .update(cx, |project, cx| {
5834 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5835 })
5836 .await
5837 .unwrap();
5838 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5839 buffer.read_with(cx, |buffer, cx| {
5840 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5841 assert!(!buffer.is_dirty());
5842 assert!(!buffer.has_conflict());
5843 });
5844
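        // Opening the newly saved path should return the existing buffer rather than creating a new one.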
5845 let opened_buffer = project
5846 .update(cx, |project, cx| {
5847 project.open_buffer((worktree_id, "file1"), cx)
5848 })
5849 .await
5850 .unwrap();
5851 assert_eq!(opened_buffer, buffer);
5852 }
5853
5854 #[gpui::test(retries = 5)]
5855 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5856 let dir = temp_tree(json!({
5857 "a": {
5858 "file1": "",
5859 "file2": "",
5860 "file3": "",
5861 },
5862 "b": {
5863 "c": {
5864 "file4": "",
5865 "file5": "",
5866 }
5867 }
5868 }));
5869
5870 let project = Project::test(Arc::new(RealFs), cx);
5871 let rpc = project.read_with(cx, |p, _| p.client.clone());
5872
5873 let (tree, _) = project
5874 .update(cx, |p, cx| {
5875 p.find_or_create_local_worktree(dir.path(), true, cx)
5876 })
5877 .await
5878 .unwrap();
5879 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5880
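        // Helpers for opening buffers and looking up worktree entry ids by path.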
5881 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5882 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5883 async move { buffer.await.unwrap() }
5884 };
5885 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5886 tree.read_with(cx, |tree, _| {
5887 tree.entry_for_path(path)
5888                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5889 .id
5890 })
5891 };
5892
5893 let buffer2 = buffer_for_path("a/file2", cx).await;
5894 let buffer3 = buffer_for_path("a/file3", cx).await;
5895 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5896 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5897
5898 let file2_id = id_for_path("a/file2", &cx);
5899 let file3_id = id_for_path("a/file3", &cx);
5900 let file4_id = id_for_path("b/c/file4", &cx);
5901
5902 // Wait for the initial scan.
5903 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5904 .await;
5905
5906 // Create a remote copy of this worktree.
5907 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5908 let (remote, load_task) = cx.update(|cx| {
5909 Worktree::remote(
5910 1,
5911 1,
5912 initial_snapshot.to_proto(&Default::default(), true),
5913 rpc.clone(),
5914 cx,
5915 )
5916 });
5917 load_task.await;
5918
5919 cx.read(|cx| {
5920 assert!(!buffer2.read(cx).is_dirty());
5921 assert!(!buffer3.read(cx).is_dirty());
5922 assert!(!buffer4.read(cx).is_dirty());
5923 assert!(!buffer5.read(cx).is_dirty());
5924 });
5925
5926 // Rename and delete files and directories.
5927 tree.flush_fs_events(&cx).await;
5928 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5929 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5930 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5931 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5932 tree.flush_fs_events(&cx).await;
5933
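        // After the renames and deletions above, these are the paths the worktree should contain.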
5934 let expected_paths = vec![
5935 "a",
5936 "a/file1",
5937 "a/file2.new",
5938 "b",
5939 "d",
5940 "d/file3",
5941 "d/file4",
5942 ];
5943
5944 cx.read(|app| {
5945 assert_eq!(
5946 tree.read(app)
5947 .paths()
5948 .map(|p| p.to_str().unwrap())
5949 .collect::<Vec<_>>(),
5950 expected_paths
5951 );
5952
5953 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5954 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5955 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5956
5957 assert_eq!(
5958 buffer2.read(app).file().unwrap().path().as_ref(),
5959 Path::new("a/file2.new")
5960 );
5961 assert_eq!(
5962 buffer3.read(app).file().unwrap().path().as_ref(),
5963 Path::new("d/file3")
5964 );
5965 assert_eq!(
5966 buffer4.read(app).file().unwrap().path().as_ref(),
5967 Path::new("d/file4")
5968 );
5969 assert_eq!(
5970 buffer5.read(app).file().unwrap().path().as_ref(),
5971 Path::new("b/c/file5")
5972 );
5973
5974 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5975 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5976 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5977 assert!(buffer5.read(app).file().unwrap().is_deleted());
5978 });
5979
5980 // Update the remote worktree. Check that it becomes consistent with the
5981 // local worktree.
5982 remote.update(cx, |remote, cx| {
5983 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5984 &initial_snapshot,
5985 1,
5986 1,
5987 true,
5988 );
5989 remote
5990 .as_remote_mut()
5991 .unwrap()
5992 .snapshot
5993 .apply_remote_update(update_message)
5994 .unwrap();
5995
5996 assert_eq!(
5997 remote
5998 .paths()
5999 .map(|p| p.to_str().unwrap())
6000 .collect::<Vec<_>>(),
6001 expected_paths
6002 );
6003 });
6004 }
6005
6006 #[gpui::test]
6007 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6008 let fs = FakeFs::new(cx.background());
6009 fs.insert_tree(
6010 "/the-dir",
6011 json!({
6012 "a.txt": "a-contents",
6013 "b.txt": "b-contents",
6014 }),
6015 )
6016 .await;
6017
6018 let project = Project::test(fs.clone(), cx);
6019 let worktree_id = project
6020 .update(cx, |p, cx| {
6021 p.find_or_create_local_worktree("/the-dir", true, cx)
6022 })
6023 .await
6024 .unwrap()
6025 .0
6026 .read_with(cx, |tree, _| tree.id());
6027
6028 // Spawn multiple tasks to open paths, repeating some paths.
6029 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6030 (
6031 p.open_buffer((worktree_id, "a.txt"), cx),
6032 p.open_buffer((worktree_id, "b.txt"), cx),
6033 p.open_buffer((worktree_id, "a.txt"), cx),
6034 )
6035 });
6036
6037 let buffer_a_1 = buffer_a_1.await.unwrap();
6038 let buffer_a_2 = buffer_a_2.await.unwrap();
6039 let buffer_b = buffer_b.await.unwrap();
6040 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6041 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6042
6043 // There is only one buffer per path.
6044 let buffer_a_id = buffer_a_1.id();
6045 assert_eq!(buffer_a_2.id(), buffer_a_id);
6046
6047 // Open the same path again while it is still open.
6048 drop(buffer_a_1);
6049 let buffer_a_3 = project
6050 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6051 .await
6052 .unwrap();
6053
6054 // There's still only one buffer per path.
6055 assert_eq!(buffer_a_3.id(), buffer_a_id);
6056 }
6057
6058 #[gpui::test]
6059 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6060 use std::fs;
6061
6062 let dir = temp_tree(json!({
6063 "file1": "abc",
6064 "file2": "def",
6065 "file3": "ghi",
6066 }));
6067
6068 let project = Project::test(Arc::new(RealFs), cx);
6069 let (worktree, _) = project
6070 .update(cx, |p, cx| {
6071 p.find_or_create_local_worktree(dir.path(), true, cx)
6072 })
6073 .await
6074 .unwrap();
6075 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6076
6077 worktree.flush_fs_events(&cx).await;
6078 worktree
6079 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6080 .await;
6081
6082 let buffer1 = project
6083 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6084 .await
6085 .unwrap();
6086 let events = Rc::new(RefCell::new(Vec::new()));
6087
6088 // initially, the buffer isn't dirty.
6089 buffer1.update(cx, |buffer, cx| {
6090 cx.subscribe(&buffer1, {
6091 let events = events.clone();
6092 move |_, _, event, _| match event {
6093 BufferEvent::Operation(_) => {}
6094 _ => events.borrow_mut().push(event.clone()),
6095 }
6096 })
6097 .detach();
6098
6099 assert!(!buffer.is_dirty());
6100 assert!(events.borrow().is_empty());
6101
6102 buffer.edit(vec![1..2], "", cx);
6103 });
6104
6105 // after the first edit, the buffer is dirty, and emits a dirtied event.
6106 buffer1.update(cx, |buffer, cx| {
6107            assert_eq!(buffer.text(), "ac");
6108 assert!(buffer.is_dirty());
6109 assert_eq!(
6110 *events.borrow(),
6111 &[language::Event::Edited, language::Event::Dirtied]
6112 );
6113 events.borrow_mut().clear();
6114 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6115 });
6116
6117 // after saving, the buffer is not dirty, and emits a saved event.
6118 buffer1.update(cx, |buffer, cx| {
6119 assert!(!buffer.is_dirty());
6120 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6121 events.borrow_mut().clear();
6122
6123 buffer.edit(vec![1..1], "B", cx);
6124 buffer.edit(vec![2..2], "D", cx);
6125 });
6126
6127 // after editing again, the buffer is dirty, and emits another dirty event.
6128 buffer1.update(cx, |buffer, cx| {
6129            assert_eq!(buffer.text(), "aBDc");
6130 assert!(buffer.is_dirty());
6131 assert_eq!(
6132 *events.borrow(),
6133 &[
6134 language::Event::Edited,
6135 language::Event::Dirtied,
6136 language::Event::Edited,
6137 ],
6138 );
6139 events.borrow_mut().clear();
6140
6141 // TODO - currently, after restoring the buffer to its
6142            // previously-saved state, the buffer is still considered dirty.
6143            buffer.edit([1..3], "", cx);
6144            assert_eq!(buffer.text(), "ac");
6145 assert!(buffer.is_dirty());
6146 });
6147
6148 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6149
6150 // When a file is deleted, the buffer is considered dirty.
6151 let events = Rc::new(RefCell::new(Vec::new()));
6152 let buffer2 = project
6153 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6154 .await
6155 .unwrap();
6156 buffer2.update(cx, |_, cx| {
6157 cx.subscribe(&buffer2, {
6158 let events = events.clone();
6159 move |_, _, event, _| events.borrow_mut().push(event.clone())
6160 })
6161 .detach();
6162 });
6163
6164 fs::remove_file(dir.path().join("file2")).unwrap();
6165 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6166 assert_eq!(
6167 *events.borrow(),
6168 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6169 );
6170
6171        // When a file is deleted while its buffer is already dirty, no additional Dirtied event is emitted.
6172 let events = Rc::new(RefCell::new(Vec::new()));
6173 let buffer3 = project
6174 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6175 .await
6176 .unwrap();
6177 buffer3.update(cx, |_, cx| {
6178 cx.subscribe(&buffer3, {
6179 let events = events.clone();
6180 move |_, _, event, _| events.borrow_mut().push(event.clone())
6181 })
6182 .detach();
6183 });
6184
6185 worktree.flush_fs_events(&cx).await;
6186 buffer3.update(cx, |buffer, cx| {
6187 buffer.edit(Some(0..0), "x", cx);
6188 });
6189 events.borrow_mut().clear();
6190 fs::remove_file(dir.path().join("file3")).unwrap();
6191 buffer3
6192 .condition(&cx, |_, _| !events.borrow().is_empty())
6193 .await;
6194 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6195 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6196 }
6197
6198 #[gpui::test]
6199 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6200 use std::fs;
6201
6202 let initial_contents = "aaa\nbbbbb\nc\n";
6203 let dir = temp_tree(json!({ "the-file": initial_contents }));
6204
6205 let project = Project::test(Arc::new(RealFs), cx);
6206 let (worktree, _) = project
6207 .update(cx, |p, cx| {
6208 p.find_or_create_local_worktree(dir.path(), true, cx)
6209 })
6210 .await
6211 .unwrap();
6212 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6213
6214 worktree
6215 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6216 .await;
6217
6218 let abs_path = dir.path().join("the-file");
6219 let buffer = project
6220 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6221 .await
6222 .unwrap();
6223
6224 // TODO
6225 // Add a cursor on each row.
6226 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6227 // assert!(!buffer.is_dirty());
6228 // buffer.add_selection_set(
6229 // &(0..3)
6230 // .map(|row| Selection {
6231 // id: row as usize,
6232 // start: Point::new(row, 1),
6233 // end: Point::new(row, 1),
6234 // reversed: false,
6235 // goal: SelectionGoal::None,
6236 // })
6237 // .collect::<Vec<_>>(),
6238 // cx,
6239 // )
6240 // });
6241
6242 // Change the file on disk, adding two new lines of text, and removing
6243 // one line.
6244 buffer.read_with(cx, |buffer, _| {
6245 assert!(!buffer.is_dirty());
6246 assert!(!buffer.has_conflict());
6247 });
6248 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6249 fs::write(&abs_path, new_contents).unwrap();
6250
6251 // Because the buffer was not modified, it is reloaded from disk. Its
6252 // contents are edited according to the diff between the old and new
6253 // file contents.
6254 buffer
6255 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6256 .await;
6257
6258 buffer.update(cx, |buffer, _| {
6259 assert_eq!(buffer.text(), new_contents);
6260 assert!(!buffer.is_dirty());
6261 assert!(!buffer.has_conflict());
6262
6263 // TODO
6264 // let cursor_positions = buffer
6265 // .selection_set(selection_set_id)
6266 // .unwrap()
6267 // .selections::<Point>(&*buffer)
6268 // .map(|selection| {
6269 // assert_eq!(selection.start, selection.end);
6270 // selection.start
6271 // })
6272 // .collect::<Vec<_>>();
6273 // assert_eq!(
6274 // cursor_positions,
6275 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6276 // );
6277 });
6278
6279        // Modify the buffer, making it dirty.
6280 buffer.update(cx, |buffer, cx| {
6281 buffer.edit(vec![0..0], " ", cx);
6282 assert!(buffer.is_dirty());
6283 assert!(!buffer.has_conflict());
6284 });
6285
6286 // Change the file on disk again, adding blank lines to the beginning.
6287 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6288
6289 // Because the buffer is modified, it doesn't reload from disk, but is
6290 // marked as having a conflict.
6291 buffer
6292 .condition(&cx, |buffer, _| buffer.has_conflict())
6293 .await;
6294 }
6295
6296 #[gpui::test]
6297 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6298 cx.foreground().forbid_parking();
6299
6300 let fs = FakeFs::new(cx.background());
6301 fs.insert_tree(
6302 "/the-dir",
6303 json!({
6304 "a.rs": "
6305 fn foo(mut v: Vec<usize>) {
6306 for x in &v {
6307 v.push(1);
6308 }
6309 }
6310 "
6311 .unindent(),
6312 }),
6313 )
6314 .await;
6315
6316 let project = Project::test(fs.clone(), cx);
6317 let (worktree, _) = project
6318 .update(cx, |p, cx| {
6319 p.find_or_create_local_worktree("/the-dir", true, cx)
6320 })
6321 .await
6322 .unwrap();
6323 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6324
6325 let buffer = project
6326 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6327 .await
6328 .unwrap();
6329
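        // Publish a set of diagnostics whose entries reference one another via
        // relatedInformation, so that primary diagnostics and their hints can be grouped.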
6330 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6331 let message = lsp::PublishDiagnosticsParams {
6332 uri: buffer_uri.clone(),
6333 diagnostics: vec![
6334 lsp::Diagnostic {
6335 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6336 severity: Some(DiagnosticSeverity::WARNING),
6337 message: "error 1".to_string(),
6338 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6339 location: lsp::Location {
6340 uri: buffer_uri.clone(),
6341 range: lsp::Range::new(
6342 lsp::Position::new(1, 8),
6343 lsp::Position::new(1, 9),
6344 ),
6345 },
6346 message: "error 1 hint 1".to_string(),
6347 }]),
6348 ..Default::default()
6349 },
6350 lsp::Diagnostic {
6351 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6352 severity: Some(DiagnosticSeverity::HINT),
6353 message: "error 1 hint 1".to_string(),
6354 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6355 location: lsp::Location {
6356 uri: buffer_uri.clone(),
6357 range: lsp::Range::new(
6358 lsp::Position::new(1, 8),
6359 lsp::Position::new(1, 9),
6360 ),
6361 },
6362 message: "original diagnostic".to_string(),
6363 }]),
6364 ..Default::default()
6365 },
6366 lsp::Diagnostic {
6367 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6368 severity: Some(DiagnosticSeverity::ERROR),
6369 message: "error 2".to_string(),
6370 related_information: Some(vec![
6371 lsp::DiagnosticRelatedInformation {
6372 location: lsp::Location {
6373 uri: buffer_uri.clone(),
6374 range: lsp::Range::new(
6375 lsp::Position::new(1, 13),
6376 lsp::Position::new(1, 15),
6377 ),
6378 },
6379 message: "error 2 hint 1".to_string(),
6380 },
6381 lsp::DiagnosticRelatedInformation {
6382 location: lsp::Location {
6383 uri: buffer_uri.clone(),
6384 range: lsp::Range::new(
6385 lsp::Position::new(1, 13),
6386 lsp::Position::new(1, 15),
6387 ),
6388 },
6389 message: "error 2 hint 2".to_string(),
6390 },
6391 ]),
6392 ..Default::default()
6393 },
6394 lsp::Diagnostic {
6395 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6396 severity: Some(DiagnosticSeverity::HINT),
6397 message: "error 2 hint 1".to_string(),
6398 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6399 location: lsp::Location {
6400 uri: buffer_uri.clone(),
6401 range: lsp::Range::new(
6402 lsp::Position::new(2, 8),
6403 lsp::Position::new(2, 17),
6404 ),
6405 },
6406 message: "original diagnostic".to_string(),
6407 }]),
6408 ..Default::default()
6409 },
6410 lsp::Diagnostic {
6411 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6412 severity: Some(DiagnosticSeverity::HINT),
6413 message: "error 2 hint 2".to_string(),
6414 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6415 location: lsp::Location {
6416 uri: buffer_uri.clone(),
6417 range: lsp::Range::new(
6418 lsp::Position::new(2, 8),
6419 lsp::Position::new(2, 17),
6420 ),
6421 },
6422 message: "original diagnostic".to_string(),
6423 }]),
6424 ..Default::default()
6425 },
6426 ],
6427 version: None,
6428 };
6429
6430 project
6431 .update(cx, |p, cx| {
6432 p.update_diagnostics(message, &Default::default(), cx)
6433 })
6434 .unwrap();
6435 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6436
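        // The diagnostics are grouped: group 0 contains "error 1" and its hint,
        // and group 1 contains "error 2" and its two hints.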
6437 assert_eq!(
6438 buffer
6439 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6440 .collect::<Vec<_>>(),
6441 &[
6442 DiagnosticEntry {
6443 range: Point::new(1, 8)..Point::new(1, 9),
6444 diagnostic: Diagnostic {
6445 severity: DiagnosticSeverity::WARNING,
6446 message: "error 1".to_string(),
6447 group_id: 0,
6448 is_primary: true,
6449 ..Default::default()
6450 }
6451 },
6452 DiagnosticEntry {
6453 range: Point::new(1, 8)..Point::new(1, 9),
6454 diagnostic: Diagnostic {
6455 severity: DiagnosticSeverity::HINT,
6456 message: "error 1 hint 1".to_string(),
6457 group_id: 0,
6458 is_primary: false,
6459 ..Default::default()
6460 }
6461 },
6462 DiagnosticEntry {
6463 range: Point::new(1, 13)..Point::new(1, 15),
6464 diagnostic: Diagnostic {
6465 severity: DiagnosticSeverity::HINT,
6466 message: "error 2 hint 1".to_string(),
6467 group_id: 1,
6468 is_primary: false,
6469 ..Default::default()
6470 }
6471 },
6472 DiagnosticEntry {
6473 range: Point::new(1, 13)..Point::new(1, 15),
6474 diagnostic: Diagnostic {
6475 severity: DiagnosticSeverity::HINT,
6476 message: "error 2 hint 2".to_string(),
6477 group_id: 1,
6478 is_primary: false,
6479 ..Default::default()
6480 }
6481 },
6482 DiagnosticEntry {
6483 range: Point::new(2, 8)..Point::new(2, 17),
6484 diagnostic: Diagnostic {
6485 severity: DiagnosticSeverity::ERROR,
6486 message: "error 2".to_string(),
6487 group_id: 1,
6488 is_primary: true,
6489 ..Default::default()
6490 }
6491 }
6492 ]
6493 );
6494
6495 assert_eq!(
6496 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6497 &[
6498 DiagnosticEntry {
6499 range: Point::new(1, 8)..Point::new(1, 9),
6500 diagnostic: Diagnostic {
6501 severity: DiagnosticSeverity::WARNING,
6502 message: "error 1".to_string(),
6503 group_id: 0,
6504 is_primary: true,
6505 ..Default::default()
6506 }
6507 },
6508 DiagnosticEntry {
6509 range: Point::new(1, 8)..Point::new(1, 9),
6510 diagnostic: Diagnostic {
6511 severity: DiagnosticSeverity::HINT,
6512 message: "error 1 hint 1".to_string(),
6513 group_id: 0,
6514 is_primary: false,
6515 ..Default::default()
6516 }
6517 },
6518 ]
6519 );
6520 assert_eq!(
6521 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6522 &[
6523 DiagnosticEntry {
6524 range: Point::new(1, 13)..Point::new(1, 15),
6525 diagnostic: Diagnostic {
6526 severity: DiagnosticSeverity::HINT,
6527 message: "error 2 hint 1".to_string(),
6528 group_id: 1,
6529 is_primary: false,
6530 ..Default::default()
6531 }
6532 },
6533 DiagnosticEntry {
6534 range: Point::new(1, 13)..Point::new(1, 15),
6535 diagnostic: Diagnostic {
6536 severity: DiagnosticSeverity::HINT,
6537 message: "error 2 hint 2".to_string(),
6538 group_id: 1,
6539 is_primary: false,
6540 ..Default::default()
6541 }
6542 },
6543 DiagnosticEntry {
6544 range: Point::new(2, 8)..Point::new(2, 17),
6545 diagnostic: Diagnostic {
6546 severity: DiagnosticSeverity::ERROR,
6547 message: "error 2".to_string(),
6548 group_id: 1,
6549 is_primary: true,
6550 ..Default::default()
6551 }
6552 }
6553 ]
6554 );
6555 }
6556
6557 #[gpui::test]
6558 async fn test_rename(cx: &mut gpui::TestAppContext) {
6559 cx.foreground().forbid_parking();
6560
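        // Set up a Rust language with a fake language server so that rename
        // requests can be intercepted below.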
6561 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6562 let language = Arc::new(Language::new(
6563 LanguageConfig {
6564 name: "Rust".into(),
6565 path_suffixes: vec!["rs".to_string()],
6566 language_server: Some(language_server_config),
6567 ..Default::default()
6568 },
6569 Some(tree_sitter_rust::language()),
6570 ));
6571
6572 let fs = FakeFs::new(cx.background());
6573 fs.insert_tree(
6574 "/dir",
6575 json!({
6576 "one.rs": "const ONE: usize = 1;",
6577 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6578 }),
6579 )
6580 .await;
6581
6582 let project = Project::test(fs.clone(), cx);
6583 project.update(cx, |project, _| {
6584 Arc::get_mut(&mut project.languages).unwrap().add(language);
6585 });
6586
6587 let (tree, _) = project
6588 .update(cx, |project, cx| {
6589 project.find_or_create_local_worktree("/dir", true, cx)
6590 })
6591 .await
6592 .unwrap();
6593 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6594 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6595 .await;
6596
6597 let buffer = project
6598 .update(cx, |project, cx| {
6599 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6600 })
6601 .await
6602 .unwrap();
6603
6604 let mut fake_server = fake_servers.next().await.unwrap();
6605
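        // Issue a prepare-rename request and verify the range reported by the fake server.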
6606 let response = project.update(cx, |project, cx| {
6607 project.prepare_rename(buffer.clone(), 7, cx)
6608 });
6609 fake_server
6610 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6611 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6612 assert_eq!(params.position, lsp::Position::new(0, 7));
6613 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6614 lsp::Position::new(0, 6),
6615 lsp::Position::new(0, 9),
6616 )))
6617 })
6618 .next()
6619 .await
6620 .unwrap();
6621 let range = response.await.unwrap().unwrap();
6622 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6623 assert_eq!(range, 6..9);
6624
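        // Perform the rename. The fake server responds with edits to both files,
        // and both buffers should be updated accordingly.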
6625 let response = project.update(cx, |project, cx| {
6626 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6627 });
6628 fake_server
6629 .handle_request::<lsp::request::Rename, _>(|params, _| {
6630 assert_eq!(
6631 params.text_document_position.text_document.uri.as_str(),
6632 "file:///dir/one.rs"
6633 );
6634 assert_eq!(
6635 params.text_document_position.position,
6636 lsp::Position::new(0, 7)
6637 );
6638 assert_eq!(params.new_name, "THREE");
6639 Some(lsp::WorkspaceEdit {
6640 changes: Some(
6641 [
6642 (
6643 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6644 vec![lsp::TextEdit::new(
6645 lsp::Range::new(
6646 lsp::Position::new(0, 6),
6647 lsp::Position::new(0, 9),
6648 ),
6649 "THREE".to_string(),
6650 )],
6651 ),
6652 (
6653 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6654 vec![
6655 lsp::TextEdit::new(
6656 lsp::Range::new(
6657 lsp::Position::new(0, 24),
6658 lsp::Position::new(0, 27),
6659 ),
6660 "THREE".to_string(),
6661 ),
6662 lsp::TextEdit::new(
6663 lsp::Range::new(
6664 lsp::Position::new(0, 35),
6665 lsp::Position::new(0, 38),
6666 ),
6667 "THREE".to_string(),
6668 ),
6669 ],
6670 ),
6671 ]
6672 .into_iter()
6673 .collect(),
6674 ),
6675 ..Default::default()
6676 })
6677 })
6678 .next()
6679 .await
6680 .unwrap();
6681 let mut transaction = response.await.unwrap().0;
6682 assert_eq!(transaction.len(), 2);
6683 assert_eq!(
6684 transaction
6685 .remove_entry(&buffer)
6686 .unwrap()
6687 .0
6688 .read_with(cx, |buffer, _| buffer.text()),
6689 "const THREE: usize = 1;"
6690 );
6691 assert_eq!(
6692 transaction
6693 .into_keys()
6694 .next()
6695 .unwrap()
6696 .read_with(cx, |buffer, _| buffer.text()),
6697 "const TWO: usize = one::THREE + one::THREE;"
6698 );
6699 }
6700
6701 #[gpui::test]
6702 async fn test_search(cx: &mut gpui::TestAppContext) {
6703 let fs = FakeFs::new(cx.background());
6704 fs.insert_tree(
6705 "/dir",
6706 json!({
6707 "one.rs": "const ONE: usize = 1;",
6708 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6709 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6710 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6711 }),
6712 )
6713 .await;
6714 let project = Project::test(fs.clone(), cx);
6715 let (tree, _) = project
6716 .update(cx, |project, cx| {
6717 project.find_or_create_local_worktree("/dir", true, cx)
6718 })
6719 .await
6720 .unwrap();
6721 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6722 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6723 .await;
6724
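        // Searching the files on disk finds the occurrences of "TWO" in two.rs and three.rs.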
6725 assert_eq!(
6726 search(&project, SearchQuery::text("TWO", false, true), cx)
6727 .await
6728 .unwrap(),
6729 HashMap::from_iter([
6730 ("two.rs".to_string(), vec![6..9]),
6731 ("three.rs".to_string(), vec![37..40])
6732 ])
6733 );
6734
6735 let buffer_4 = project
6736 .update(cx, |project, cx| {
6737 project.open_buffer((worktree_id, "four.rs"), cx)
6738 })
6739 .await
6740 .unwrap();
6741 buffer_4.update(cx, |buffer, cx| {
6742 buffer.edit([20..28, 31..43], "two::TWO", cx);
6743 });
6744
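        // The search reflects the buffer's unsaved edits: four.rs now contains two matches.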
6745 assert_eq!(
6746 search(&project, SearchQuery::text("TWO", false, true), cx)
6747 .await
6748 .unwrap(),
6749 HashMap::from_iter([
6750 ("two.rs".to_string(), vec![6..9]),
6751 ("three.rs".to_string(), vec![37..40]),
6752 ("four.rs".to_string(), vec![25..28, 36..39])
6753 ])
6754 );
6755
6756 async fn search(
6757 project: &ModelHandle<Project>,
6758 query: SearchQuery,
6759 cx: &mut gpui::TestAppContext,
6760 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6761 let results = project
6762 .update(cx, |project, cx| project.search(query, cx))
6763 .await?;
6764
6765 Ok(results
6766 .into_iter()
6767 .map(|(buffer, ranges)| {
6768 buffer.read_with(cx, |buffer, _| {
6769 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6770 let ranges = ranges
6771 .into_iter()
6772 .map(|range| range.to_offset(buffer))
6773 .collect::<Vec<_>>();
6774 (path, ranges)
6775 })
6776 })
6777 .collect())
6778 }
6779 }
6780}