1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use sha2::{Digest, Sha256};
32use similar::{ChangeTag, TextDiff};
33use std::{
34 cell::RefCell,
35 cmp::{self, Ordering},
36 convert::TryInto,
37 hash::Hash,
38 mem,
39 ops::Range,
40 path::{Component, Path, PathBuf},
41 rc::Rc,
42 sync::{
43 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
44 Arc,
45 },
46 time::Instant,
47};
48use util::{post_inc, ResultExt, TryFutureExt as _};
49
50pub use fs::*;
51pub use worktree::*;
52
53pub trait Item: Entity {
54 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
55}
56
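/// The state of an open project: its worktrees, open buffers, running language servers,
/// and collaborators. A project is either local (backed by the file system and able to be
/// shared) or remote (a replica joined over RPC as a guest).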
57pub struct Project {
58 worktrees: Vec<WorktreeHandle>,
59 active_entry: Option<ProjectEntryId>,
60 languages: Arc<LanguageRegistry>,
61 language_servers:
62 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
63 started_language_servers:
64 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
65 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
66 language_server_settings: Arc<Mutex<serde_json::Value>>,
67 next_language_server_id: usize,
68 client: Arc<client::Client>,
69 next_entry_id: Arc<AtomicUsize>,
70 user_store: ModelHandle<UserStore>,
71 fs: Arc<dyn Fs>,
72 client_state: ProjectClientState,
73 collaborators: HashMap<PeerId, Collaborator>,
74 subscriptions: Vec<client::Subscription>,
75 language_servers_with_diagnostics_running: isize,
76 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
77 shared_buffers: HashMap<PeerId, HashSet<u64>>,
78 loading_buffers: HashMap<
79 ProjectPath,
80 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
81 >,
82 loading_local_worktrees:
83 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
84 opened_buffers: HashMap<u64, OpenBuffer>,
85 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
86 nonce: u128,
87}
88
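/// How the project retains an open buffer: strongly while the project is remote or shared,
/// weakly otherwise, or as a queue of operations that arrived before the buffer finished
/// loading and are applied once it is registered.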
89enum OpenBuffer {
90 Strong(ModelHandle<Buffer>),
91 Weak(WeakModelHandle<Buffer>),
92 Loading(Vec<Operation>),
93}
94
95enum WorktreeHandle {
96 Strong(ModelHandle<Worktree>),
97 Weak(WeakModelHandle<Worktree>),
98}
99
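/// Whether this project is the local, authoritative copy (which may or may not currently
/// be shared) or a remote replica joined from another peer.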
100enum ProjectClientState {
101 Local {
102 is_shared: bool,
103 remote_id_tx: watch::Sender<Option<u64>>,
104 remote_id_rx: watch::Receiver<Option<u64>>,
105 _maintain_remote_id_task: Task<Option<()>>,
106 },
107 Remote {
108 sharing_has_stopped: bool,
109 remote_id: u64,
110 replica_id: ReplicaId,
111 _detect_unshare_task: Task<Option<()>>,
112 },
113}
114
115#[derive(Clone, Debug)]
116pub struct Collaborator {
117 pub user: Arc<User>,
118 pub peer_id: PeerId,
119 pub replica_id: ReplicaId,
120}
121
122#[derive(Clone, Debug, PartialEq)]
123pub enum Event {
124 ActiveEntryChanged(Option<ProjectEntryId>),
125 WorktreeRemoved(WorktreeId),
126 DiskBasedDiagnosticsStarted,
127 DiskBasedDiagnosticsUpdated,
128 DiskBasedDiagnosticsFinished,
129 DiagnosticsUpdated(ProjectPath),
130 RemoteIdChanged(Option<u64>),
131 CollaboratorLeft(PeerId),
132}
133
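/// Messages forwarded from a language server's notification handlers onto a channel so
/// they can be processed sequentially on the main thread (see `on_lsp_event`).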
134enum LanguageServerEvent {
135 WorkStart {
136 token: String,
137 },
138 WorkProgress {
139 token: String,
140 progress: LanguageServerProgress,
141 },
142 WorkEnd {
143 token: String,
144 },
145 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
146 WorkspaceEdit(lsp::ApplyWorkspaceEditParams),
147}
148
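/// The reported status of a running language server: its name, any in-progress work keyed
/// by LSP progress token, and how many disk-based diagnostic updates are still pending.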
149pub struct LanguageServerStatus {
150 pub name: String,
151 pub pending_work: BTreeMap<String, LanguageServerProgress>,
152 pending_diagnostic_updates: isize,
153}
154
155#[derive(Clone, Debug)]
156pub struct LanguageServerProgress {
157 pub message: Option<String>,
158 pub percentage: Option<usize>,
159 pub last_update_at: Instant,
160}
161
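/// A path to a file or directory, expressed relative to the root of a particular worktree.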
162#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
163pub struct ProjectPath {
164 pub worktree_id: WorktreeId,
165 pub path: Arc<Path>,
166}
167
168#[derive(Clone, Debug, Default, PartialEq)]
169pub struct DiagnosticSummary {
170 pub error_count: usize,
171 pub warning_count: usize,
172 pub info_count: usize,
173 pub hint_count: usize,
174}
175
176#[derive(Debug)]
177pub struct Location {
178 pub buffer: ModelHandle<Buffer>,
179 pub range: Range<language::Anchor>,
180}
181
182#[derive(Debug)]
183pub struct DocumentHighlight {
184 pub range: Range<language::Anchor>,
185 pub kind: DocumentHighlightKind,
186}
187
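/// A symbol reported by a language server. `source_worktree_id` records which worktree's
/// language server produced the symbol, while `worktree_id` and `path` locate the file
/// that contains it.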
188#[derive(Clone, Debug)]
189pub struct Symbol {
190 pub source_worktree_id: WorktreeId,
191 pub worktree_id: WorktreeId,
192 pub language_server_name: LanguageServerName,
193 pub path: PathBuf,
194 pub label: CodeLabel,
195 pub name: String,
196 pub kind: lsp::SymbolKind,
197 pub range: Range<PointUtf16>,
198 pub signature: [u8; 32],
199}
200
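/// The buffer transactions produced by an operation that can touch multiple buffers, such
/// as applying a workspace edit or formatting, keyed by the buffer each one applies to.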
201#[derive(Default)]
202pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
203
204impl DiagnosticSummary {
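    /// Tallies the given diagnostics by severity, counting only primary entries.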
205 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
206 let mut this = Self {
207 error_count: 0,
208 warning_count: 0,
209 info_count: 0,
210 hint_count: 0,
211 };
212
213 for entry in diagnostics {
214 if entry.diagnostic.is_primary {
215 match entry.diagnostic.severity {
216 DiagnosticSeverity::ERROR => this.error_count += 1,
217 DiagnosticSeverity::WARNING => this.warning_count += 1,
218 DiagnosticSeverity::INFORMATION => this.info_count += 1,
219 DiagnosticSeverity::HINT => this.hint_count += 1,
220 _ => {}
221 }
222 }
223 }
224
225 this
226 }
227
228 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
229 proto::DiagnosticSummary {
230 path: path.to_string_lossy().to_string(),
231 error_count: self.error_count as u32,
232 warning_count: self.warning_count as u32,
233 info_count: self.info_count as u32,
234 hint_count: self.hint_count as u32,
235 }
236 }
237}
238
239#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
240pub struct ProjectEntryId(usize);
241
242impl ProjectEntryId {
243 pub fn new(counter: &AtomicUsize) -> Self {
244 Self(counter.fetch_add(1, SeqCst))
245 }
246
247 pub fn from_proto(id: u64) -> Self {
248 Self(id as usize)
249 }
250
251 pub fn to_proto(&self) -> u64 {
252 self.0 as u64
253 }
254
255 pub fn to_usize(&self) -> usize {
256 self.0
257 }
258}
259
260impl Project {
261 pub fn init(client: &Arc<Client>) {
262 client.add_model_message_handler(Self::handle_add_collaborator);
263 client.add_model_message_handler(Self::handle_buffer_reloaded);
264 client.add_model_message_handler(Self::handle_buffer_saved);
265 client.add_model_message_handler(Self::handle_start_language_server);
266 client.add_model_message_handler(Self::handle_update_language_server);
267 client.add_model_message_handler(Self::handle_remove_collaborator);
268 client.add_model_message_handler(Self::handle_register_worktree);
269 client.add_model_message_handler(Self::handle_unregister_worktree);
270 client.add_model_message_handler(Self::handle_unshare_project);
271 client.add_model_message_handler(Self::handle_update_buffer_file);
272 client.add_model_message_handler(Self::handle_update_buffer);
273 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
274 client.add_model_message_handler(Self::handle_update_worktree);
275 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
276 client.add_model_request_handler(Self::handle_apply_code_action);
277 client.add_model_request_handler(Self::handle_format_buffers);
278 client.add_model_request_handler(Self::handle_get_code_actions);
279 client.add_model_request_handler(Self::handle_get_completions);
280 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
281 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
282 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
283 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
284 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
285 client.add_model_request_handler(Self::handle_search_project);
286 client.add_model_request_handler(Self::handle_get_project_symbols);
287 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
288 client.add_model_request_handler(Self::handle_open_buffer_by_id);
289 client.add_model_request_handler(Self::handle_open_buffer_by_path);
290 client.add_model_request_handler(Self::handle_save_buffer);
291 }
292
293 pub fn local(
294 client: Arc<Client>,
295 user_store: ModelHandle<UserStore>,
296 languages: Arc<LanguageRegistry>,
297 fs: Arc<dyn Fs>,
298 cx: &mut MutableAppContext,
299 ) -> ModelHandle<Self> {
300 cx.add_model(|cx: &mut ModelContext<Self>| {
301 let (remote_id_tx, remote_id_rx) = watch::channel();
302 let _maintain_remote_id_task = cx.spawn_weak({
303 let rpc = client.clone();
304 move |this, mut cx| {
305 async move {
306 let mut status = rpc.status();
307 while let Some(status) = status.next().await {
308 if let Some(this) = this.upgrade(&cx) {
309 if status.is_connected() {
310 this.update(&mut cx, |this, cx| this.register(cx)).await?;
311 } else {
312 this.update(&mut cx, |this, cx| this.unregister(cx));
313 }
314 }
315 }
316 Ok(())
317 }
318 .log_err()
319 }
320 });
321
322 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
323 Self {
324 worktrees: Default::default(),
325 collaborators: Default::default(),
326 opened_buffers: Default::default(),
327 shared_buffers: Default::default(),
328 loading_buffers: Default::default(),
329 loading_local_worktrees: Default::default(),
330 buffer_snapshots: Default::default(),
331 client_state: ProjectClientState::Local {
332 is_shared: false,
333 remote_id_tx,
334 remote_id_rx,
335 _maintain_remote_id_task,
336 },
337 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
338 subscriptions: Vec::new(),
339 active_entry: None,
340 languages,
341 client,
342 user_store,
343 fs,
344 next_entry_id: Default::default(),
345 language_servers_with_diagnostics_running: 0,
346 language_servers: Default::default(),
347 started_language_servers: Default::default(),
348 language_server_statuses: Default::default(),
349 language_server_settings: Default::default(),
350 next_language_server_id: 0,
351 nonce: StdRng::from_entropy().gen(),
352 }
353 })
354 }
355
356 pub async fn remote(
357 remote_id: u64,
358 client: Arc<Client>,
359 user_store: ModelHandle<UserStore>,
360 languages: Arc<LanguageRegistry>,
361 fs: Arc<dyn Fs>,
362 cx: &mut AsyncAppContext,
363 ) -> Result<ModelHandle<Self>> {
364 client.authenticate_and_connect(true, &cx).await?;
365
366 let response = client
367 .request(proto::JoinProject {
368 project_id: remote_id,
369 })
370 .await?;
371
372 let replica_id = response.replica_id as ReplicaId;
373
374 let mut worktrees = Vec::new();
375 for worktree in response.worktrees {
376 let (worktree, load_task) = cx
377 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
378 worktrees.push(worktree);
379 load_task.detach();
380 }
381
382 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
383 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
384 let mut this = Self {
385 worktrees: Vec::new(),
386 loading_buffers: Default::default(),
387 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
388 shared_buffers: Default::default(),
389 loading_local_worktrees: Default::default(),
390 active_entry: None,
391 collaborators: Default::default(),
392 languages,
393 user_store: user_store.clone(),
394 fs,
395 next_entry_id: Default::default(),
396 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
397 client: client.clone(),
398 client_state: ProjectClientState::Remote {
399 sharing_has_stopped: false,
400 remote_id,
401 replica_id,
402 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
403 async move {
404 let mut status = client.status();
405 let is_connected =
406 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we were connected initially, any subsequent status
                            // change means the connection was momentarily lost.
408 if !is_connected || status.next().await.is_some() {
409 if let Some(this) = this.upgrade(&cx) {
410 this.update(&mut cx, |this, cx| this.project_unshared(cx))
411 }
412 }
413 Ok(())
414 }
415 .log_err()
416 }),
417 },
418 language_servers_with_diagnostics_running: 0,
419 language_servers: Default::default(),
420 started_language_servers: Default::default(),
421 language_server_settings: Default::default(),
422 language_server_statuses: response
423 .language_servers
424 .into_iter()
425 .map(|server| {
426 (
427 server.id as usize,
428 LanguageServerStatus {
429 name: server.name,
430 pending_work: Default::default(),
431 pending_diagnostic_updates: 0,
432 },
433 )
434 })
435 .collect(),
436 next_language_server_id: 0,
437 opened_buffers: Default::default(),
438 buffer_snapshots: Default::default(),
439 nonce: StdRng::from_entropy().gen(),
440 };
441 for worktree in worktrees {
442 this.add_worktree(&worktree, cx);
443 }
444 this
445 });
446
447 let user_ids = response
448 .collaborators
449 .iter()
450 .map(|peer| peer.user_id)
451 .collect();
452 user_store
453 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
454 .await?;
455 let mut collaborators = HashMap::default();
456 for message in response.collaborators {
457 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
458 collaborators.insert(collaborator.peer_id, collaborator);
459 }
460
461 this.update(cx, |this, _| {
462 this.collaborators = collaborators;
463 });
464
465 Ok(this)
466 }
467
468 #[cfg(any(test, feature = "test-support"))]
469 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
470 let languages = Arc::new(LanguageRegistry::test());
471 let http_client = client::test::FakeHttpClient::with_404_response();
472 let client = client::Client::new(http_client.clone());
473 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
474 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
475 }
476
477 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
478 self.opened_buffers
479 .get(&remote_id)
480 .and_then(|buffer| buffer.upgrade(cx))
481 }
482
483 #[cfg(any(test, feature = "test-support"))]
484 pub fn languages(&self) -> &Arc<LanguageRegistry> {
485 &self.languages
486 }
487
488 #[cfg(any(test, feature = "test-support"))]
489 pub fn check_invariants(&self, cx: &AppContext) {
490 if self.is_local() {
491 let mut worktree_root_paths = HashMap::default();
492 for worktree in self.worktrees(cx) {
493 let worktree = worktree.read(cx);
494 let abs_path = worktree.as_local().unwrap().abs_path().clone();
495 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
496 assert_eq!(
497 prev_worktree_id,
498 None,
499 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
500 abs_path,
501 worktree.id(),
502 prev_worktree_id
503 )
504 }
505 } else {
506 let replica_id = self.replica_id();
507 for buffer in self.opened_buffers.values() {
508 if let Some(buffer) = buffer.upgrade(cx) {
509 let buffer = buffer.read(cx);
510 assert_eq!(
511 buffer.deferred_ops_len(),
512 0,
513 "replica {}, buffer {} has deferred operations",
514 replica_id,
515 buffer.remote_id()
516 );
517 }
518 }
519 }
520 }
521
522 #[cfg(any(test, feature = "test-support"))]
523 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
524 let path = path.into();
525 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
526 self.opened_buffers.iter().any(|(_, buffer)| {
527 if let Some(buffer) = buffer.upgrade(cx) {
528 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
529 if file.worktree == worktree && file.path() == &path.path {
530 return true;
531 }
532 }
533 }
534 false
535 })
536 } else {
537 false
538 }
539 }
540
541 pub fn fs(&self) -> &Arc<dyn Fs> {
542 &self.fs
543 }
544
545 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
546 self.unshare(cx);
547 for worktree in &self.worktrees {
548 if let Some(worktree) = worktree.upgrade(cx) {
549 worktree.update(cx, |worktree, _| {
550 worktree.as_local_mut().unwrap().unregister();
551 });
552 }
553 }
554
555 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
556 *remote_id_tx.borrow_mut() = None;
557 }
558
559 self.subscriptions.clear();
560 }
561
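    /// Requests a remote id for this project from the server and registers each local
    /// worktree under it. Called whenever the client reports a connected status; any
    /// previous registration is torn down first.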
562 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
563 self.unregister(cx);
564
565 let response = self.client.request(proto::RegisterProject {});
566 cx.spawn(|this, mut cx| async move {
567 let remote_id = response.await?.project_id;
568
569 let mut registrations = Vec::new();
570 this.update(&mut cx, |this, cx| {
571 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
572 *remote_id_tx.borrow_mut() = Some(remote_id);
573 }
574
575 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
576
577 this.subscriptions
578 .push(this.client.add_model_for_remote_entity(remote_id, cx));
579
580 for worktree in &this.worktrees {
581 if let Some(worktree) = worktree.upgrade(cx) {
582 registrations.push(worktree.update(cx, |worktree, cx| {
583 let worktree = worktree.as_local_mut().unwrap();
584 worktree.register(remote_id, cx)
585 }));
586 }
587 }
588 });
589
590 futures::future::try_join_all(registrations).await?;
591 Ok(())
592 })
593 }
594
595 pub fn remote_id(&self) -> Option<u64> {
596 match &self.client_state {
597 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
598 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
599 }
600 }
601
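    /// Returns a future that resolves to this project's remote id, waiting for one to be
    /// assigned if the project is local and has not been registered yet.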
602 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
603 let mut id = None;
604 let mut watch = None;
605 match &self.client_state {
606 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
607 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
608 }
609
610 async move {
611 if let Some(id) = id {
612 return id;
613 }
614 let mut watch = watch.unwrap();
615 loop {
616 let id = *watch.borrow();
617 if let Some(id) = id {
618 return id;
619 }
620 watch.next().await;
621 }
622 }
623 }
624
625 pub fn replica_id(&self) -> ReplicaId {
626 match &self.client_state {
627 ProjectClientState::Local { .. } => 0,
628 ProjectClientState::Remote { replica_id, .. } => *replica_id,
629 }
630 }
631
632 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
633 &self.collaborators
634 }
635
636 pub fn worktrees<'a>(
637 &'a self,
638 cx: &'a AppContext,
639 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
640 self.worktrees
641 .iter()
642 .filter_map(move |worktree| worktree.upgrade(cx))
643 }
644
645 pub fn visible_worktrees<'a>(
646 &'a self,
647 cx: &'a AppContext,
648 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
649 self.worktrees.iter().filter_map(|worktree| {
650 worktree.upgrade(cx).and_then(|worktree| {
651 if worktree.read(cx).is_visible() {
652 Some(worktree)
653 } else {
654 None
655 }
656 })
657 })
658 }
659
660 pub fn worktree_for_id(
661 &self,
662 id: WorktreeId,
663 cx: &AppContext,
664 ) -> Option<ModelHandle<Worktree>> {
665 self.worktrees(cx)
666 .find(|worktree| worktree.read(cx).id() == id)
667 }
668
669 pub fn worktree_for_entry(
670 &self,
671 entry_id: ProjectEntryId,
672 cx: &AppContext,
673 ) -> Option<ModelHandle<Worktree>> {
674 self.worktrees(cx)
675 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
676 }
677
678 pub fn worktree_id_for_entry(
679 &self,
680 entry_id: ProjectEntryId,
681 cx: &AppContext,
682 ) -> Option<WorktreeId> {
683 self.worktree_for_entry(entry_id, cx)
684 .map(|worktree| worktree.read(cx).id())
685 }
686
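    /// Shares this local project with collaborators: open buffers and worktrees are
    /// upgraded to strong handles so they stay alive while the project is shared, the
    /// server is asked to mark the project as shared, and every worktree is then shared
    /// under the project's remote id.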
687 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
688 let rpc = self.client.clone();
689 cx.spawn(|this, mut cx| async move {
690 let project_id = this.update(&mut cx, |this, cx| {
691 if let ProjectClientState::Local {
692 is_shared,
693 remote_id_rx,
694 ..
695 } = &mut this.client_state
696 {
697 *is_shared = true;
698
699 for open_buffer in this.opened_buffers.values_mut() {
700 match open_buffer {
701 OpenBuffer::Strong(_) => {}
702 OpenBuffer::Weak(buffer) => {
703 if let Some(buffer) = buffer.upgrade(cx) {
704 *open_buffer = OpenBuffer::Strong(buffer);
705 }
706 }
707 OpenBuffer::Loading(_) => unreachable!(),
708 }
709 }
710
711 for worktree_handle in this.worktrees.iter_mut() {
712 match worktree_handle {
713 WorktreeHandle::Strong(_) => {}
714 WorktreeHandle::Weak(worktree) => {
715 if let Some(worktree) = worktree.upgrade(cx) {
716 *worktree_handle = WorktreeHandle::Strong(worktree);
717 }
718 }
719 }
720 }
721
722 remote_id_rx
723 .borrow()
724 .ok_or_else(|| anyhow!("no project id"))
725 } else {
726 Err(anyhow!("can't share a remote project"))
727 }
728 })?;
729
730 rpc.request(proto::ShareProject { project_id }).await?;
731
732 let mut tasks = Vec::new();
733 this.update(&mut cx, |this, cx| {
734 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
735 worktree.update(cx, |worktree, cx| {
736 let worktree = worktree.as_local_mut().unwrap();
737 tasks.push(worktree.share(project_id, cx));
738 });
739 }
740 });
741 for task in tasks {
742 task.await?;
743 }
744 this.update(&mut cx, |_, cx| cx.notify());
745 Ok(())
746 })
747 }
748
749 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
750 let rpc = self.client.clone();
751
752 if let ProjectClientState::Local {
753 is_shared,
754 remote_id_rx,
755 ..
756 } = &mut self.client_state
757 {
758 if !*is_shared {
759 return;
760 }
761
762 *is_shared = false;
763 self.collaborators.clear();
764 self.shared_buffers.clear();
765 for worktree_handle in self.worktrees.iter_mut() {
766 if let WorktreeHandle::Strong(worktree) = worktree_handle {
767 let is_visible = worktree.update(cx, |worktree, _| {
768 worktree.as_local_mut().unwrap().unshare();
769 worktree.is_visible()
770 });
771 if !is_visible {
772 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
773 }
774 }
775 }
776
            for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }
785
786 if let Some(project_id) = *remote_id_rx.borrow() {
787 rpc.send(proto::UnshareProject { project_id }).log_err();
788 }
789
790 cx.notify();
791 } else {
792 log::error!("attempted to unshare a remote project");
793 }
794 }
795
796 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
797 if let ProjectClientState::Remote {
798 sharing_has_stopped,
799 ..
800 } = &mut self.client_state
801 {
802 *sharing_has_stopped = true;
803 self.collaborators.clear();
804 cx.notify();
805 }
806 }
807
808 pub fn is_read_only(&self) -> bool {
809 match &self.client_state {
810 ProjectClientState::Local { .. } => false,
811 ProjectClientState::Remote {
812 sharing_has_stopped,
813 ..
814 } => *sharing_has_stopped,
815 }
816 }
817
818 pub fn is_local(&self) -> bool {
819 match &self.client_state {
820 ProjectClientState::Local { .. } => true,
821 ProjectClientState::Remote { .. } => false,
822 }
823 }
824
825 pub fn is_remote(&self) -> bool {
826 !self.is_local()
827 }
828
829 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
830 if self.is_remote() {
831 return Err(anyhow!("creating buffers as a guest is not supported yet"));
832 }
833
834 let buffer = cx.add_model(|cx| {
835 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
836 });
837 self.register_buffer(&buffer, cx)?;
838 Ok(buffer)
839 }
840
841 pub fn open_path(
842 &mut self,
843 path: impl Into<ProjectPath>,
844 cx: &mut ModelContext<Self>,
845 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
846 let task = self.open_buffer(path, cx);
847 cx.spawn_weak(|_, cx| async move {
848 let buffer = task.await?;
849 let project_entry_id = buffer
850 .read_with(&cx, |buffer, cx| {
851 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
852 })
853 .ok_or_else(|| anyhow!("no project entry"))?;
854 Ok((project_entry_id, buffer.into()))
855 })
856 }
857
858 pub fn open_buffer(
859 &mut self,
860 path: impl Into<ProjectPath>,
861 cx: &mut ModelContext<Self>,
862 ) -> Task<Result<ModelHandle<Buffer>>> {
863 let project_path = path.into();
864 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
865 worktree
866 } else {
867 return Task::ready(Err(anyhow!("no such worktree")));
868 };
869
870 // If there is already a buffer for the given path, then return it.
871 let existing_buffer = self.get_open_buffer(&project_path, cx);
872 if let Some(existing_buffer) = existing_buffer {
873 return Task::ready(Ok(existing_buffer));
874 }
875
876 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
877 // If the given path is already being loaded, then wait for that existing
878 // task to complete and return the same buffer.
879 hash_map::Entry::Occupied(e) => e.get().clone(),
880
881 // Otherwise, record the fact that this path is now being loaded.
882 hash_map::Entry::Vacant(entry) => {
883 let (mut tx, rx) = postage::watch::channel();
884 entry.insert(rx.clone());
885
886 let load_buffer = if worktree.read(cx).is_local() {
887 self.open_local_buffer(&project_path.path, &worktree, cx)
888 } else {
889 self.open_remote_buffer(&project_path.path, &worktree, cx)
890 };
891
892 cx.spawn(move |this, mut cx| async move {
893 let load_result = load_buffer.await;
894 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
895 // Record the fact that the buffer is no longer loading.
896 this.loading_buffers.remove(&project_path);
897 let buffer = load_result.map_err(Arc::new)?;
898 Ok(buffer)
899 }));
900 })
901 .detach();
902 rx
903 }
904 };
905
906 cx.foreground().spawn(async move {
907 loop {
908 if let Some(result) = loading_watch.borrow().as_ref() {
909 match result {
910 Ok(buffer) => return Ok(buffer.clone()),
911 Err(error) => return Err(anyhow!("{}", error)),
912 }
913 }
914 loading_watch.next().await;
915 }
916 })
917 }
918
919 fn open_local_buffer(
920 &mut self,
921 path: &Arc<Path>,
922 worktree: &ModelHandle<Worktree>,
923 cx: &mut ModelContext<Self>,
924 ) -> Task<Result<ModelHandle<Buffer>>> {
925 let load_buffer = worktree.update(cx, |worktree, cx| {
926 let worktree = worktree.as_local_mut().unwrap();
927 worktree.load_buffer(path, cx)
928 });
929 cx.spawn(|this, mut cx| async move {
930 let buffer = load_buffer.await?;
931 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
932 Ok(buffer)
933 })
934 }
935
936 fn open_remote_buffer(
937 &mut self,
938 path: &Arc<Path>,
939 worktree: &ModelHandle<Worktree>,
940 cx: &mut ModelContext<Self>,
941 ) -> Task<Result<ModelHandle<Buffer>>> {
942 let rpc = self.client.clone();
943 let project_id = self.remote_id().unwrap();
944 let remote_worktree_id = worktree.read(cx).id();
945 let path = path.clone();
946 let path_string = path.to_string_lossy().to_string();
947 cx.spawn(|this, mut cx| async move {
948 let response = rpc
949 .request(proto::OpenBufferByPath {
950 project_id,
951 worktree_id: remote_worktree_id.to_proto(),
952 path: path_string,
953 })
954 .await?;
955 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
956 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
957 .await
958 })
959 }
960
961 fn open_local_buffer_via_lsp(
962 &mut self,
963 abs_path: lsp::Url,
964 lsp_adapter: Arc<dyn LspAdapter>,
965 lsp_server: Arc<LanguageServer>,
966 cx: &mut ModelContext<Self>,
967 ) -> Task<Result<ModelHandle<Buffer>>> {
968 cx.spawn(|this, mut cx| async move {
969 let abs_path = abs_path
970 .to_file_path()
971 .map_err(|_| anyhow!("can't convert URI to path"))?;
972 let (worktree, relative_path) = if let Some(result) =
973 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
974 {
975 result
976 } else {
977 let worktree = this
978 .update(&mut cx, |this, cx| {
979 this.create_local_worktree(&abs_path, false, cx)
980 })
981 .await?;
982 this.update(&mut cx, |this, cx| {
983 this.language_servers.insert(
984 (worktree.read(cx).id(), lsp_adapter.name()),
985 (lsp_adapter, lsp_server),
986 );
987 });
988 (worktree, PathBuf::new())
989 };
990
991 let project_path = ProjectPath {
992 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
993 path: relative_path.into(),
994 };
995 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
996 .await
997 })
998 }
999
1000 pub fn open_buffer_by_id(
1001 &mut self,
1002 id: u64,
1003 cx: &mut ModelContext<Self>,
1004 ) -> Task<Result<ModelHandle<Buffer>>> {
1005 if let Some(buffer) = self.buffer_for_id(id, cx) {
1006 Task::ready(Ok(buffer))
1007 } else if self.is_local() {
1008 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1009 } else if let Some(project_id) = self.remote_id() {
1010 let request = self
1011 .client
1012 .request(proto::OpenBufferById { project_id, id });
1013 cx.spawn(|this, mut cx| async move {
1014 let buffer = request
1015 .await?
1016 .buffer
1017 .ok_or_else(|| anyhow!("invalid buffer"))?;
1018 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1019 .await
1020 })
1021 } else {
1022 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1023 }
1024 }
1025
1026 pub fn save_buffer_as(
1027 &mut self,
1028 buffer: ModelHandle<Buffer>,
1029 abs_path: PathBuf,
1030 cx: &mut ModelContext<Project>,
1031 ) -> Task<Result<()>> {
1032 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1033 cx.spawn(|this, mut cx| async move {
1034 let (worktree, path) = worktree_task.await?;
1035 worktree
1036 .update(&mut cx, |worktree, cx| {
1037 worktree
1038 .as_local_mut()
1039 .unwrap()
1040 .save_buffer_as(buffer.clone(), path, cx)
1041 })
1042 .await?;
1043 this.update(&mut cx, |this, cx| {
1044 this.assign_language_to_buffer(&buffer, cx);
1045 this.register_buffer_with_language_server(&buffer, cx);
1046 });
1047 Ok(())
1048 })
1049 }
1050
1051 pub fn get_open_buffer(
1052 &mut self,
1053 path: &ProjectPath,
1054 cx: &mut ModelContext<Self>,
1055 ) -> Option<ModelHandle<Buffer>> {
1056 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1057 self.opened_buffers.values().find_map(|buffer| {
1058 let buffer = buffer.upgrade(cx)?;
1059 let file = File::from_dyn(buffer.read(cx).file())?;
1060 if file.worktree == worktree && file.path() == &path.path {
1061 Some(buffer)
1062 } else {
1063 None
1064 }
1065 })
1066 }
1067
1068 fn register_buffer(
1069 &mut self,
1070 buffer: &ModelHandle<Buffer>,
1071 cx: &mut ModelContext<Self>,
1072 ) -> Result<()> {
1073 let remote_id = buffer.read(cx).remote_id();
1074 let open_buffer = if self.is_remote() || self.is_shared() {
1075 OpenBuffer::Strong(buffer.clone())
1076 } else {
1077 OpenBuffer::Weak(buffer.downgrade())
1078 };
1079
1080 match self.opened_buffers.insert(remote_id, open_buffer) {
1081 None => {}
1082 Some(OpenBuffer::Loading(operations)) => {
1083 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1084 }
1085 Some(OpenBuffer::Weak(existing_handle)) => {
1086 if existing_handle.upgrade(cx).is_some() {
1087 Err(anyhow!(
1088 "already registered buffer with remote id {}",
1089 remote_id
1090 ))?
1091 }
1092 }
1093 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1094 "already registered buffer with remote id {}",
1095 remote_id
1096 ))?,
1097 }
1098 cx.subscribe(buffer, |this, buffer, event, cx| {
1099 this.on_buffer_event(buffer, event, cx);
1100 })
1101 .detach();
1102
1103 self.assign_language_to_buffer(buffer, cx);
1104 self.register_buffer_with_language_server(buffer, cx);
1105
1106 Ok(())
1107 }
1108
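    /// Notifies the buffer's language server (if one is running) that the buffer was
    /// opened, seeds the snapshot history used to compute incremental `didChange`
    /// notifications, and arranges for a `didClose` notification when the buffer is
    /// released. Any diagnostics already stored on the worktree for this path are applied
    /// to the buffer as well.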
1109 fn register_buffer_with_language_server(
1110 &mut self,
1111 buffer_handle: &ModelHandle<Buffer>,
1112 cx: &mut ModelContext<Self>,
1113 ) {
1114 let buffer = buffer_handle.read(cx);
1115 let buffer_id = buffer.remote_id();
1116 if let Some(file) = File::from_dyn(buffer.file()) {
1117 if file.is_local() {
1118 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1119 let initial_snapshot = buffer.text_snapshot();
1120 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1121
1122 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1123 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1124 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1125 .log_err();
1126 }
1127 }
1128
1129 if let Some((_, server)) = language_server {
1130 server
1131 .notify::<lsp::notification::DidOpenTextDocument>(
1132 lsp::DidOpenTextDocumentParams {
1133 text_document: lsp::TextDocumentItem::new(
1134 uri,
1135 Default::default(),
1136 0,
1137 initial_snapshot.text(),
1138 ),
                            },
1141 )
1142 .log_err();
1143 buffer_handle.update(cx, |buffer, cx| {
1144 buffer.set_completion_triggers(
1145 server
1146 .capabilities()
1147 .completion_provider
1148 .as_ref()
1149 .and_then(|provider| provider.trigger_characters.clone())
1150 .unwrap_or(Vec::new()),
1151 cx,
1152 )
1153 });
1154 self.buffer_snapshots
1155 .insert(buffer_id, vec![(0, initial_snapshot)]);
1156 }
1157
1158 cx.observe_release(buffer_handle, |this, buffer, cx| {
1159 if let Some(file) = File::from_dyn(buffer.file()) {
1160 if file.is_local() {
1161 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1162 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1163 server
1164 .notify::<lsp::notification::DidCloseTextDocument>(
1165 lsp::DidCloseTextDocumentParams {
1166 text_document: lsp::TextDocumentIdentifier::new(
1167 uri.clone(),
1168 ),
1169 },
1170 )
1171 .log_err();
1172 }
1173 }
1174 }
1175 })
1176 .detach();
1177 }
1178 }
1179 }
1180
1181 fn on_buffer_event(
1182 &mut self,
1183 buffer: ModelHandle<Buffer>,
1184 event: &BufferEvent,
1185 cx: &mut ModelContext<Self>,
1186 ) -> Option<()> {
1187 match event {
1188 BufferEvent::Operation(operation) => {
1189 let project_id = self.remote_id()?;
1190 let request = self.client.request(proto::UpdateBuffer {
1191 project_id,
1192 buffer_id: buffer.read(cx).remote_id(),
1193 operations: vec![language::proto::serialize_operation(&operation)],
1194 });
1195 cx.background().spawn(request).detach_and_log_err(cx);
1196 }
1197 BufferEvent::Edited { .. } => {
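                // Send an incremental textDocument/didChange notification: translate the
                // edits made since the last snapshot we sent into LSP content changes,
                // bump the document version, and remember the new snapshot for next time.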
1198 let (_, language_server) = self
1199 .language_server_for_buffer(buffer.read(cx), cx)?
1200 .clone();
1201 let buffer = buffer.read(cx);
1202 let file = File::from_dyn(buffer.file())?;
1203 let abs_path = file.as_local()?.abs_path(cx);
1204 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1205 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1206 let (version, prev_snapshot) = buffer_snapshots.last()?;
1207 let next_snapshot = buffer.text_snapshot();
1208 let next_version = version + 1;
1209
1210 let content_changes = buffer
1211 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1212 .map(|edit| {
1213 let edit_start = edit.new.start.0;
1214 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1215 let new_text = next_snapshot
1216 .text_for_range(edit.new.start.1..edit.new.end.1)
1217 .collect();
1218 lsp::TextDocumentContentChangeEvent {
1219 range: Some(lsp::Range::new(
1220 point_to_lsp(edit_start),
1221 point_to_lsp(edit_end),
1222 )),
1223 range_length: None,
1224 text: new_text,
1225 }
1226 })
1227 .collect();
1228
1229 buffer_snapshots.push((next_version, next_snapshot));
1230
1231 language_server
1232 .notify::<lsp::notification::DidChangeTextDocument>(
1233 lsp::DidChangeTextDocumentParams {
1234 text_document: lsp::VersionedTextDocumentIdentifier::new(
1235 uri,
1236 next_version,
1237 ),
1238 content_changes,
1239 },
1240 )
1241 .log_err();
1242 }
1243 BufferEvent::Saved => {
1244 let file = File::from_dyn(buffer.read(cx).file())?;
1245 let worktree_id = file.worktree_id(cx);
1246 let abs_path = file.as_local()?.abs_path(cx);
1247 let text_document = lsp::TextDocumentIdentifier {
1248 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1249 };
1250
1251 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1252 server
1253 .notify::<lsp::notification::DidSaveTextDocument>(
1254 lsp::DidSaveTextDocumentParams {
1255 text_document: text_document.clone(),
1256 text: None,
1257 },
1258 )
1259 .log_err();
1260 }
1261 }
1262 _ => {}
1263 }
1264
1265 None
1266 }
1267
1268 fn language_servers_for_worktree(
1269 &self,
1270 worktree_id: WorktreeId,
1271 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1272 self.language_servers.iter().filter_map(
1273 move |((language_server_worktree_id, _), server)| {
1274 if *language_server_worktree_id == worktree_id {
1275 Some(server)
1276 } else {
1277 None
1278 }
1279 },
1280 )
1281 }
1282
1283 fn assign_language_to_buffer(
1284 &mut self,
1285 buffer: &ModelHandle<Buffer>,
1286 cx: &mut ModelContext<Self>,
1287 ) -> Option<()> {
1288 // If the buffer has a language, set it and start the language server if we haven't already.
1289 let full_path = buffer.read(cx).file()?.full_path(cx);
1290 let language = self.languages.select_language(&full_path)?;
1291 buffer.update(cx, |buffer, cx| {
1292 buffer.set_language(Some(language.clone()), cx);
1293 });
1294
1295 let file = File::from_dyn(buffer.read(cx).file())?;
1296 let worktree = file.worktree.read(cx).as_local()?;
1297 let worktree_id = worktree.id();
1298 let worktree_abs_path = worktree.abs_path().clone();
1299 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1300
1301 None
1302 }
1303
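    /// Starts a language server for the given language in the given worktree, unless one
    /// has already been started for that (worktree, adapter) pair. Diagnostics, progress,
    /// and workspace-edit messages from the server are forwarded onto a channel and
    /// handled sequentially in `on_lsp_event`; once the server is initialized, every
    /// already-open buffer in the worktree that matches the language is opened with it.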
1304 fn start_language_server(
1305 &mut self,
1306 worktree_id: WorktreeId,
1307 worktree_path: Arc<Path>,
1308 language: Arc<Language>,
1309 cx: &mut ModelContext<Self>,
1310 ) {
1311 let adapter = if let Some(adapter) = language.lsp_adapter() {
1312 adapter
1313 } else {
1314 return;
1315 };
1316 let key = (worktree_id, adapter.name());
1317 self.started_language_servers
1318 .entry(key.clone())
1319 .or_insert_with(|| {
1320 let server_id = post_inc(&mut self.next_language_server_id);
1321 let language_server = self.languages.start_language_server(
1322 server_id,
1323 language.clone(),
1324 worktree_path,
1325 self.client.http_client(),
1326 cx,
1327 );
1328 cx.spawn_weak(|this, mut cx| async move {
1329 let language_server = language_server?.await.log_err()?;
1330 let this = this.upgrade(&cx)?;
1331 let (language_server_events_tx, language_server_events_rx) =
1332 smol::channel::unbounded();
1333
1334 language_server
1335 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1336 let language_server_events_tx = language_server_events_tx.clone();
1337 move |params, _| {
1338 language_server_events_tx
1339 .try_send(LanguageServerEvent::DiagnosticsUpdate(params))
1340 .ok();
1341 }
1342 })
1343 .detach();
1344
1345 language_server
1346 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1347 let settings = this
1348 .read_with(&cx, |this, _| this.language_server_settings.clone());
1349 move |params, _| {
1350 let settings = settings.lock().clone();
1351 async move {
1352 Ok(params
1353 .items
1354 .into_iter()
1355 .map(|item| {
1356 if let Some(section) = &item.section {
1357 settings
1358 .get(section)
1359 .cloned()
1360 .unwrap_or(serde_json::Value::Null)
1361 } else {
1362 settings.clone()
1363 }
1364 })
1365 .collect())
1366 }
1367 }
1368 })
1369 .detach();
1370
1371 language_server
1372 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1373 let language_server_events_tx = language_server_events_tx.clone();
1374 move |params, _| {
1375 language_server_events_tx
1376 .try_send(LanguageServerEvent::WorkspaceEdit(params))
1377 .ok();
1378 async move {
1379 Ok(lsp::ApplyWorkspaceEditResponse {
1380 applied: true,
1381 failed_change: None,
1382 failure_reason: None,
1383 })
1384 }
1385 }
1386 })
1387 .detach();
1388
1389 language_server
1390 .on_notification::<lsp::notification::Progress, _>(move |params, _| {
1391 let token = match params.token {
1392 lsp::NumberOrString::String(token) => token,
1393 lsp::NumberOrString::Number(token) => {
1394 log::info!("skipping numeric progress token {}", token);
1395 return;
1396 }
1397 };
1398
1399 match params.value {
1400 lsp::ProgressParamsValue::WorkDone(progress) => match progress {
1401 lsp::WorkDoneProgress::Begin(_) => {
1402 language_server_events_tx
1403 .try_send(LanguageServerEvent::WorkStart { token })
1404 .ok();
1405 }
1406 lsp::WorkDoneProgress::Report(report) => {
1407 language_server_events_tx
1408 .try_send(LanguageServerEvent::WorkProgress {
1409 token,
1410 progress: LanguageServerProgress {
1411 message: report.message,
1412 percentage: report
1413 .percentage
1414 .map(|p| p as usize),
1415 last_update_at: Instant::now(),
1416 },
1417 })
1418 .ok();
1419 }
1420 lsp::WorkDoneProgress::End(_) => {
1421 language_server_events_tx
1422 .try_send(LanguageServerEvent::WorkEnd { token })
1423 .ok();
1424 }
1425 },
1426 }
1427 })
1428 .detach();
1429
1430 let language_server = language_server
1431 .initialize(adapter.initialization_options())
1432 .await
1433 .log_err()?;
1434
1435 // Process all the LSP events.
1436 cx.spawn(|mut cx| {
1437 let this = this.downgrade();
1438 let adapter = adapter.clone();
1439 let language_server = language_server.clone();
1440 async move {
1441 while let Ok(event) = language_server_events_rx.recv().await {
1442 let this = this.upgrade(&cx)?;
1443 Self::on_lsp_event(
1444 this,
1445 server_id,
1446 &adapter,
1447 &language_server,
1448 event,
1449 &mut cx,
1450 )
1451 .await;
1452
1453 // Don't starve the main thread when lots of events arrive all at once.
1454 smol::future::yield_now().await;
1455 }
1456 Some(())
1457 }
1458 })
1459 .detach();
1460
1461 this.update(&mut cx, |this, cx| {
1462 this.language_servers
1463 .insert(key.clone(), (adapter, language_server.clone()));
1464 this.language_server_statuses.insert(
1465 server_id,
1466 LanguageServerStatus {
1467 name: language_server.name().to_string(),
1468 pending_work: Default::default(),
1469 pending_diagnostic_updates: 0,
1470 },
1471 );
1472 language_server
1473 .notify::<lsp::notification::DidChangeConfiguration>(
1474 lsp::DidChangeConfigurationParams {
1475 settings: this.language_server_settings.lock().clone(),
1476 },
1477 )
1478 .ok();
1479
1480 if let Some(project_id) = this.remote_id() {
1481 this.client
1482 .send(proto::StartLanguageServer {
1483 project_id,
1484 server: Some(proto::LanguageServer {
1485 id: server_id as u64,
1486 name: language_server.name().to_string(),
1487 }),
1488 })
1489 .log_err();
1490 }
1491
1492 // Tell the language server about every open buffer in the worktree that matches the language.
1493 for buffer in this.opened_buffers.values() {
1494 if let Some(buffer_handle) = buffer.upgrade(cx) {
1495 let buffer = buffer_handle.read(cx);
1496 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1497 file
1498 } else {
1499 continue;
1500 };
1501 let language = if let Some(language) = buffer.language() {
1502 language
1503 } else {
1504 continue;
1505 };
1506 if file.worktree.read(cx).id() != key.0
1507 || language.lsp_adapter().map(|a| a.name())
1508 != Some(key.1.clone())
1509 {
1510 continue;
1511 }
1512
1513 let file = file.as_local()?;
1514 let versions = this
1515 .buffer_snapshots
1516 .entry(buffer.remote_id())
1517 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1518 let (version, initial_snapshot) = versions.last().unwrap();
1519 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1520 language_server
1521 .notify::<lsp::notification::DidOpenTextDocument>(
1522 lsp::DidOpenTextDocumentParams {
1523 text_document: lsp::TextDocumentItem::new(
1524 uri,
1525 Default::default(),
1526 *version,
1527 initial_snapshot.text(),
1528 ),
1529 },
1530 )
1531 .log_err()?;
1532 buffer_handle.update(cx, |buffer, cx| {
1533 buffer.set_completion_triggers(
1534 language_server
1535 .capabilities()
1536 .completion_provider
1537 .as_ref()
1538 .and_then(|provider| {
1539 provider.trigger_characters.clone()
1540 })
1541 .unwrap_or(Vec::new()),
1542 cx,
1543 )
1544 });
1545 }
1546 }
1547
1548 cx.notify();
1549 Some(())
1550 });
1551
1552 Some(language_server)
1553 })
1554 });
1555 }
1556
1557 pub fn restart_language_servers_for_buffers(
1558 &mut self,
1559 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1560 cx: &mut ModelContext<Self>,
1561 ) -> Option<()> {
1562 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1563 .into_iter()
1564 .filter_map(|buffer| {
1565 let file = File::from_dyn(buffer.read(cx).file())?;
1566 let worktree = file.worktree.read(cx).as_local()?;
1567 let worktree_id = worktree.id();
1568 let worktree_abs_path = worktree.abs_path().clone();
1569 let full_path = file.full_path(cx);
1570 Some((worktree_id, worktree_abs_path, full_path))
1571 })
1572 .collect();
1573 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1574 let language = self.languages.select_language(&full_path)?;
1575 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1576 }
1577
1578 None
1579 }
1580
1581 fn restart_language_server(
1582 &mut self,
1583 worktree_id: WorktreeId,
1584 worktree_path: Arc<Path>,
1585 language: Arc<Language>,
1586 cx: &mut ModelContext<Self>,
1587 ) {
1588 let adapter = if let Some(adapter) = language.lsp_adapter() {
1589 adapter
1590 } else {
1591 return;
1592 };
1593 let key = (worktree_id, adapter.name());
1594 let server_to_shutdown = self.language_servers.remove(&key);
1595 self.started_language_servers.remove(&key);
        if let Some((_, server)) = &server_to_shutdown {
            self.language_server_statuses.remove(&server.server_id());
        }
1599 cx.spawn_weak(|this, mut cx| async move {
1600 if let Some(this) = this.upgrade(&cx) {
1601 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1602 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1603 shutdown_task.await;
1604 }
1605 }
1606
1607 this.update(&mut cx, |this, cx| {
1608 this.start_language_server(worktree_id, worktree_path, language, cx);
1609 });
1610 }
1611 })
1612 .detach();
1613 }
1614
1615 async fn on_lsp_event(
1616 this: ModelHandle<Self>,
1617 language_server_id: usize,
1618 adapter: &Arc<dyn LspAdapter>,
1619 language_server: &Arc<LanguageServer>,
1620 event: LanguageServerEvent,
1621 cx: &mut AsyncAppContext,
1622 ) {
1623 let disk_based_diagnostics_progress_token = adapter.disk_based_diagnostics_progress_token();
1624 match event {
1625 LanguageServerEvent::WorkStart { token } => {
1626 this.update(cx, |this, cx| {
1627 let language_server_status = if let Some(status) =
1628 this.language_server_statuses.get_mut(&language_server_id)
1629 {
1630 status
1631 } else {
1632 return;
1633 };
1634
1635 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1636 language_server_status.pending_diagnostic_updates += 1;
1637 if language_server_status.pending_diagnostic_updates == 1 {
1638 this.disk_based_diagnostics_started(cx);
1639 this.broadcast_language_server_update(
1640 language_server_id,
1641 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1642 proto::LspDiskBasedDiagnosticsUpdating {},
1643 ),
1644 );
1645 }
1646 } else {
1647 this.on_lsp_work_start(language_server_id, token.clone(), cx);
1648 this.broadcast_language_server_update(
1649 language_server_id,
1650 proto::update_language_server::Variant::WorkStart(
1651 proto::LspWorkStart { token },
1652 ),
1653 );
1654 }
1655 });
1656 }
1657 LanguageServerEvent::WorkProgress { token, progress } => {
1658 this.update(cx, |this, cx| {
1659 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1660 this.on_lsp_work_progress(
1661 language_server_id,
1662 token.clone(),
1663 progress.clone(),
1664 cx,
1665 );
1666 this.broadcast_language_server_update(
1667 language_server_id,
1668 proto::update_language_server::Variant::WorkProgress(
1669 proto::LspWorkProgress {
1670 token,
1671 message: progress.message,
1672 percentage: progress.percentage.map(|p| p as u32),
1673 },
1674 ),
1675 );
1676 }
1677 });
1678 }
1679 LanguageServerEvent::WorkEnd { token } => {
1680 this.update(cx, |this, cx| {
1681 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1682 let language_server_status = if let Some(status) =
1683 this.language_server_statuses.get_mut(&language_server_id)
1684 {
1685 status
1686 } else {
1687 return;
1688 };
1689
1690 language_server_status.pending_diagnostic_updates -= 1;
1691 if language_server_status.pending_diagnostic_updates == 0 {
1692 this.disk_based_diagnostics_finished(cx);
1693 this.broadcast_language_server_update(
1694 language_server_id,
1695 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1696 proto::LspDiskBasedDiagnosticsUpdated {},
1697 ),
1698 );
1699 }
1700 } else {
1701 this.on_lsp_work_end(language_server_id, token.clone(), cx);
1702 this.broadcast_language_server_update(
1703 language_server_id,
1704 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1705 token,
1706 }),
1707 );
1708 }
1709 });
1710 }
1711 LanguageServerEvent::DiagnosticsUpdate(mut params) => {
1712 this.update(cx, |this, cx| {
1713 adapter.process_diagnostics(&mut params);
1714
1715 if disk_based_diagnostics_progress_token.is_none() {
1716 this.disk_based_diagnostics_started(cx);
1717 this.broadcast_language_server_update(
1718 language_server_id,
1719 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1720 proto::LspDiskBasedDiagnosticsUpdating {},
1721 ),
1722 );
1723 }
1724 this.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1725 .log_err();
1726 if disk_based_diagnostics_progress_token.is_none() {
1727 this.disk_based_diagnostics_finished(cx);
1728 this.broadcast_language_server_update(
1729 language_server_id,
1730 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1731 proto::LspDiskBasedDiagnosticsUpdated {},
1732 ),
1733 );
1734 }
1735 });
1736 }
1737 LanguageServerEvent::WorkspaceEdit(params) => {
1738 let transaction = Self::deserialize_workspace_edit(
1739 this,
1740 params.edit,
1741 false,
1742 adapter.clone(),
1743 language_server.clone(),
1744 cx,
1745 )
1746 .await
1747 .log_err();
1748
1749 // Check if there is a code action currently running, using the state that is
1750 // set in `start_code_action`. If so, then store the transaction for later use.
1751 }
1752 }
1753 }
1754
1755 fn on_lsp_work_start(
1756 &mut self,
1757 language_server_id: usize,
1758 token: String,
1759 cx: &mut ModelContext<Self>,
1760 ) {
1761 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1762 status.pending_work.insert(
1763 token,
1764 LanguageServerProgress {
1765 message: None,
1766 percentage: None,
1767 last_update_at: Instant::now(),
1768 },
1769 );
1770 cx.notify();
1771 }
1772 }
1773
1774 fn on_lsp_work_progress(
1775 &mut self,
1776 language_server_id: usize,
1777 token: String,
1778 progress: LanguageServerProgress,
1779 cx: &mut ModelContext<Self>,
1780 ) {
1781 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1782 status.pending_work.insert(token, progress);
1783 cx.notify();
1784 }
1785 }
1786
1787 fn on_lsp_work_end(
1788 &mut self,
1789 language_server_id: usize,
1790 token: String,
1791 cx: &mut ModelContext<Self>,
1792 ) {
1793 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1794 status.pending_work.remove(&token);
1795 cx.notify();
1796 }
1797 }
1798
1799 fn broadcast_language_server_update(
1800 &self,
1801 language_server_id: usize,
1802 event: proto::update_language_server::Variant,
1803 ) {
1804 if let Some(project_id) = self.remote_id() {
1805 self.client
1806 .send(proto::UpdateLanguageServer {
1807 project_id,
1808 language_server_id: language_server_id as u64,
1809 variant: Some(event),
1810 })
1811 .log_err();
1812 }
1813 }
1814
1815 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1816 for (_, server) in self.language_servers.values() {
1817 server
1818 .notify::<lsp::notification::DidChangeConfiguration>(
1819 lsp::DidChangeConfigurationParams {
1820 settings: settings.clone(),
1821 },
1822 )
1823 .ok();
1824 }
1825 *self.language_server_settings.lock() = settings;
1826 }
1827
1828 pub fn language_server_statuses(
1829 &self,
1830 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1831 self.language_server_statuses.values()
1832 }
1833
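    /// Processes an LSP `textDocument/publishDiagnostics` notification. Each primary
    /// diagnostic is assigned a fresh group id and its related information is added to the
    /// same group; diagnostics that merely point back at an existing primary are treated
    /// as supporting entries whose severity and "unnecessary" flag are merged into the
    /// matching entries. The resulting entries are then stored on the worktree containing
    /// the file.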
1834 pub fn update_diagnostics(
1835 &mut self,
1836 params: lsp::PublishDiagnosticsParams,
1837 disk_based_sources: &[&str],
1838 cx: &mut ModelContext<Self>,
1839 ) -> Result<()> {
1840 let abs_path = params
1841 .uri
1842 .to_file_path()
1843 .map_err(|_| anyhow!("URI is not a file"))?;
1844 let mut next_group_id = 0;
1845 let mut diagnostics = Vec::default();
1846 let mut primary_diagnostic_group_ids = HashMap::default();
1847 let mut sources_by_group_id = HashMap::default();
1848 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1850 let source = diagnostic.source.as_ref();
1851 let code = diagnostic.code.as_ref().map(|code| match code {
1852 lsp::NumberOrString::Number(code) => code.to_string(),
1853 lsp::NumberOrString::String(code) => code.clone(),
1854 });
1855 let range = range_from_lsp(diagnostic.range);
1856 let is_supporting = diagnostic
1857 .related_information
1858 .as_ref()
1859 .map_or(false, |infos| {
1860 infos.iter().any(|info| {
1861 primary_diagnostic_group_ids.contains_key(&(
1862 source,
1863 code.clone(),
1864 range_from_lsp(info.location.range),
1865 ))
1866 })
1867 });
1868
1869 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1870 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1871 });
1872
1873 if is_supporting {
1874 supporting_diagnostics.insert(
1875 (source, code.clone(), range),
1876 (diagnostic.severity, is_unnecessary),
1877 );
1878 } else {
1879 let group_id = post_inc(&mut next_group_id);
1880 let is_disk_based = source.map_or(false, |source| {
1881 disk_based_sources.contains(&source.as_str())
1882 });
1883
1884 sources_by_group_id.insert(group_id, source);
1885 primary_diagnostic_group_ids
1886 .insert((source, code.clone(), range.clone()), group_id);
1887
1888 diagnostics.push(DiagnosticEntry {
1889 range,
1890 diagnostic: Diagnostic {
1891 code: code.clone(),
1892 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1893 message: diagnostic.message.clone(),
1894 group_id,
1895 is_primary: true,
1896 is_valid: true,
1897 is_disk_based,
1898 is_unnecessary,
1899 },
1900 });
1901 if let Some(infos) = &diagnostic.related_information {
1902 for info in infos {
1903 if info.location.uri == params.uri && !info.message.is_empty() {
1904 let range = range_from_lsp(info.location.range);
1905 diagnostics.push(DiagnosticEntry {
1906 range,
1907 diagnostic: Diagnostic {
1908 code: code.clone(),
1909 severity: DiagnosticSeverity::INFORMATION,
1910 message: info.message.clone(),
1911 group_id,
1912 is_primary: false,
1913 is_valid: true,
1914 is_disk_based,
1915 is_unnecessary: false,
1916 },
1917 });
1918 }
1919 }
1920 }
1921 }
1922 }
1923
1924 for entry in &mut diagnostics {
1925 let diagnostic = &mut entry.diagnostic;
1926 if !diagnostic.is_primary {
1927 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1928 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1929 source,
1930 diagnostic.code.clone(),
1931 entry.range.clone(),
1932 )) {
1933 if let Some(severity) = severity {
1934 diagnostic.severity = severity;
1935 }
1936 diagnostic.is_unnecessary = is_unnecessary;
1937 }
1938 }
1939 }
1940
1941 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1942 Ok(())
1943 }
1944
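    /// Records new diagnostics for a path: refreshes any open buffer for that
    /// path, stores the entries on the containing local worktree, and emits
    /// `Event::DiagnosticsUpdated`. Paths in non-visible worktrees are ignored.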
1945 pub fn update_diagnostic_entries(
1946 &mut self,
1947 abs_path: PathBuf,
1948 version: Option<i32>,
1949 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1950 cx: &mut ModelContext<Project>,
1951 ) -> Result<(), anyhow::Error> {
1952 let (worktree, relative_path) = self
1953 .find_local_worktree(&abs_path, cx)
1954 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1955 if !worktree.read(cx).is_visible() {
1956 return Ok(());
1957 }
1958
1959 let project_path = ProjectPath {
1960 worktree_id: worktree.read(cx).id(),
1961 path: relative_path.into(),
1962 };
1963
1964 for buffer in self.opened_buffers.values() {
1965 if let Some(buffer) = buffer.upgrade(cx) {
1966 if buffer
1967 .read(cx)
1968 .file()
1969 .map_or(false, |file| *file.path() == project_path.path)
1970 {
1971 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1972 break;
1973 }
1974 }
1975 }
1976 worktree.update(cx, |worktree, cx| {
1977 worktree
1978 .as_local_mut()
1979 .ok_or_else(|| anyhow!("not a local worktree"))?
1980 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1981 })?;
1982 cx.emit(Event::DiagnosticsUpdated(project_path));
1983 Ok(())
1984 }
1985
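    /// Sorts the incoming diagnostics, maps disk-based ranges through any edits
    /// made since the buffer was last saved, clips the ranges to the current
    /// buffer contents, and installs them on the buffer as a `DiagnosticSet`.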
1986 fn update_buffer_diagnostics(
1987 &mut self,
1988 buffer: &ModelHandle<Buffer>,
1989 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1990 version: Option<i32>,
1991 cx: &mut ModelContext<Self>,
1992 ) -> Result<()> {
1993 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1994 Ordering::Equal
1995 .then_with(|| b.is_primary.cmp(&a.is_primary))
1996 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1997 .then_with(|| a.severity.cmp(&b.severity))
1998 .then_with(|| a.message.cmp(&b.message))
1999 }
2000
2001 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2002
2003 diagnostics.sort_unstable_by(|a, b| {
2004 Ordering::Equal
2005 .then_with(|| a.range.start.cmp(&b.range.start))
2006 .then_with(|| b.range.end.cmp(&a.range.end))
2007 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2008 });
2009
2010 let mut sanitized_diagnostics = Vec::new();
2011 let edits_since_save = Patch::new(
2012 snapshot
2013 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2014 .collect(),
2015 );
2016 for entry in diagnostics {
2017 let start;
2018 let end;
2019 if entry.diagnostic.is_disk_based {
2020 // Some diagnostics are based on files on disk instead of buffers'
2021 // current contents. Adjust these diagnostics' ranges to reflect
2022 // any unsaved edits.
2023 start = edits_since_save.old_to_new(entry.range.start);
2024 end = edits_since_save.old_to_new(entry.range.end);
2025 } else {
2026 start = entry.range.start;
2027 end = entry.range.end;
2028 }
2029
2030 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2031 ..snapshot.clip_point_utf16(end, Bias::Right);
2032
2033 // Expand empty ranges by one character
2034 if range.start == range.end {
2035 range.end.column += 1;
2036 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2037 if range.start == range.end && range.end.column > 0 {
2038 range.start.column -= 1;
2039 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2040 }
2041 }
2042
2043 sanitized_diagnostics.push(DiagnosticEntry {
2044 range,
2045 diagnostic: entry.diagnostic,
2046 });
2047 }
2048 drop(edits_since_save);
2049
2050 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2051 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2052 Ok(())
2053 }
2054
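    /// Formats the given buffers and returns the resulting edits as a single
    /// `ProjectTransaction`. Local buffers are formatted via their language
    /// server's document (or range) formatting request; buffers that belong to a
    /// remote project are formatted by the host over RPC.
    ///
    /// A rough usage sketch (the `project` and `buffers` handles are illustrative):
    ///
    /// ```ignore
    /// // `buffers: HashSet<ModelHandle<Buffer>>`
    /// let format = project.update(cx, |project, cx| project.format(buffers, true, cx));
    /// let transaction = format.await?;
    /// ```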
2055 pub fn format(
2056 &self,
2057 buffers: HashSet<ModelHandle<Buffer>>,
2058 push_to_history: bool,
2059 cx: &mut ModelContext<Project>,
2060 ) -> Task<Result<ProjectTransaction>> {
2061 let mut local_buffers = Vec::new();
2062 let mut remote_buffers = None;
2063 for buffer_handle in buffers {
2064 let buffer = buffer_handle.read(cx);
2065 if let Some(file) = File::from_dyn(buffer.file()) {
2066 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2067 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2068 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2069 }
2070 } else {
2071 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2072 }
2073 } else {
2074 return Task::ready(Ok(Default::default()));
2075 }
2076 }
2077
2078 let remote_buffers = self.remote_id().zip(remote_buffers);
2079 let client = self.client.clone();
2080
2081 cx.spawn(|this, mut cx| async move {
2082 let mut project_transaction = ProjectTransaction::default();
2083
2084 if let Some((project_id, remote_buffers)) = remote_buffers {
2085 let response = client
2086 .request(proto::FormatBuffers {
2087 project_id,
2088 buffer_ids: remote_buffers
2089 .iter()
2090 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2091 .collect(),
2092 })
2093 .await?
2094 .transaction
2095 .ok_or_else(|| anyhow!("missing transaction"))?;
2096 project_transaction = this
2097 .update(&mut cx, |this, cx| {
2098 this.deserialize_project_transaction(response, push_to_history, cx)
2099 })
2100 .await?;
2101 }
2102
2103 for (buffer, buffer_abs_path, language_server) in local_buffers {
2104 let text_document = lsp::TextDocumentIdentifier::new(
2105 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2106 );
2107 let capabilities = &language_server.capabilities();
2108 let lsp_edits = if capabilities
2109 .document_formatting_provider
2110 .as_ref()
2111 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2112 {
2113 language_server
2114 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2115 text_document,
2116 options: lsp::FormattingOptions {
2117 tab_size: 4,
2118 insert_spaces: true,
2119 insert_final_newline: Some(true),
2120 ..Default::default()
2121 },
2122 work_done_progress_params: Default::default(),
2123 })
2124 .await?
2125 } else if capabilities
2126 .document_range_formatting_provider
2127 .as_ref()
2128 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2129 {
2130 let buffer_start = lsp::Position::new(0, 0);
2131 let buffer_end =
2132 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2133 language_server
2134 .request::<lsp::request::RangeFormatting>(
2135 lsp::DocumentRangeFormattingParams {
2136 text_document,
2137 range: lsp::Range::new(buffer_start, buffer_end),
2138 options: lsp::FormattingOptions {
2139 tab_size: 4,
2140 insert_spaces: true,
2141 insert_final_newline: Some(true),
2142 ..Default::default()
2143 },
2144 work_done_progress_params: Default::default(),
2145 },
2146 )
2147 .await?
2148 } else {
2149 continue;
2150 };
2151
2152 if let Some(lsp_edits) = lsp_edits {
2153 let edits = this
2154 .update(&mut cx, |this, cx| {
2155 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2156 })
2157 .await?;
2158 buffer.update(&mut cx, |buffer, cx| {
2159 buffer.finalize_last_transaction();
2160 buffer.start_transaction();
2161 for (range, text) in edits {
2162 buffer.edit([range], text, cx);
2163 }
2164 if buffer.end_transaction(cx).is_some() {
2165 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2166 if !push_to_history {
2167 buffer.forget_transaction(transaction.id);
2168 }
2169 project_transaction.0.insert(cx.handle(), transaction);
2170 }
2171 });
2172 }
2173 }
2174
2175 Ok(project_transaction)
2176 })
2177 }
2178
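    /// Returns the locations defining the symbol at the given position, using
    /// the buffer's language server locally or the host over RPC for remote
    /// projects.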
2179 pub fn definition<T: ToPointUtf16>(
2180 &self,
2181 buffer: &ModelHandle<Buffer>,
2182 position: T,
2183 cx: &mut ModelContext<Self>,
2184 ) -> Task<Result<Vec<Location>>> {
2185 let position = position.to_point_utf16(buffer.read(cx));
2186 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2187 }
2188
2189 pub fn references<T: ToPointUtf16>(
2190 &self,
2191 buffer: &ModelHandle<Buffer>,
2192 position: T,
2193 cx: &mut ModelContext<Self>,
2194 ) -> Task<Result<Vec<Location>>> {
2195 let position = position.to_point_utf16(buffer.read(cx));
2196 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2197 }
2198
2199 pub fn document_highlights<T: ToPointUtf16>(
2200 &self,
2201 buffer: &ModelHandle<Buffer>,
2202 position: T,
2203 cx: &mut ModelContext<Self>,
2204 ) -> Task<Result<Vec<DocumentHighlight>>> {
2205 let position = position.to_point_utf16(buffer.read(cx));
2206
2207 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2208 }
2209
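    /// Queries the workspace symbols of every running language server (or asks
    /// the host, for remote projects) and resolves each result to a path within
    /// one of the project's worktrees.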
2210 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2211 if self.is_local() {
2212 let mut language_servers = HashMap::default();
2213 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2214 if let Some(worktree) = self
2215 .worktree_for_id(*worktree_id, cx)
2216 .and_then(|worktree| worktree.read(cx).as_local())
2217 {
2218 language_servers
2219 .entry(Arc::as_ptr(language_server))
2220 .or_insert((
2221 lsp_adapter.clone(),
2222 language_server.clone(),
2223 *worktree_id,
2224 worktree.abs_path().clone(),
2225 ));
2226 }
2227 }
2228
2229 let mut requests = Vec::new();
2230 for (_, language_server, _, _) in language_servers.values() {
2231 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
2232 lsp::WorkspaceSymbolParams {
2233 query: query.to_string(),
2234 ..Default::default()
2235 },
2236 ));
2237 }
2238
2239 cx.spawn_weak(|this, cx| async move {
2240 let responses = futures::future::try_join_all(requests).await?;
2241
2242 let mut symbols = Vec::new();
2243 if let Some(this) = this.upgrade(&cx) {
2244 this.read_with(&cx, |this, cx| {
2245 for ((adapter, _, source_worktree_id, worktree_abs_path), lsp_symbols) in
2246 language_servers.into_values().zip(responses)
2247 {
2248 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
2249 |lsp_symbol| {
2250 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2251 let mut worktree_id = source_worktree_id;
2252 let path;
2253 if let Some((worktree, rel_path)) =
2254 this.find_local_worktree(&abs_path, cx)
2255 {
2256 worktree_id = worktree.read(cx).id();
2257 path = rel_path;
2258 } else {
2259 path = relativize_path(&worktree_abs_path, &abs_path);
2260 }
2261
2262 let label = this
2263 .languages
2264 .select_language(&path)
2265 .and_then(|language| {
2266 language
2267 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2268 })
2269 .unwrap_or_else(|| {
2270 CodeLabel::plain(lsp_symbol.name.clone(), None)
2271 });
2272 let signature = this.symbol_signature(worktree_id, &path);
2273
2274 Some(Symbol {
2275 source_worktree_id,
2276 worktree_id,
2277 language_server_name: adapter.name(),
2278 name: lsp_symbol.name,
2279 kind: lsp_symbol.kind,
2280 label,
2281 path,
2282 range: range_from_lsp(lsp_symbol.location.range),
2283 signature,
2284 })
2285 },
2286 ));
2287 }
2288 })
2289 }
2290
2291 Ok(symbols)
2292 })
2293 } else if let Some(project_id) = self.remote_id() {
2294 let request = self.client.request(proto::GetProjectSymbols {
2295 project_id,
2296 query: query.to_string(),
2297 });
2298 cx.spawn_weak(|this, cx| async move {
2299 let response = request.await?;
2300 let mut symbols = Vec::new();
2301 if let Some(this) = this.upgrade(&cx) {
2302 this.read_with(&cx, |this, _| {
2303 symbols.extend(
2304 response
2305 .symbols
2306 .into_iter()
2307 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2308 );
2309 })
2310 }
2311 Ok(symbols)
2312 })
2313 } else {
2314 Task::ready(Ok(Default::default()))
2315 }
2316 }
2317
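    /// Opens the buffer containing the given symbol: locally, the symbol's path
    /// is resolved against its worktree and opened via `open_local_buffer_via_lsp`;
    /// for remote projects the buffer is requested from the host.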
2318 pub fn open_buffer_for_symbol(
2319 &mut self,
2320 symbol: &Symbol,
2321 cx: &mut ModelContext<Self>,
2322 ) -> Task<Result<ModelHandle<Buffer>>> {
2323 if self.is_local() {
2324 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2325 symbol.source_worktree_id,
2326 symbol.language_server_name.clone(),
2327 )) {
2328 server.clone()
2329 } else {
2330 return Task::ready(Err(anyhow!(
2331 "language server for worktree and language not found"
2332 )));
2333 };
2334
2335 let worktree_abs_path = if let Some(worktree_abs_path) = self
2336 .worktree_for_id(symbol.worktree_id, cx)
2337 .and_then(|worktree| worktree.read(cx).as_local())
2338 .map(|local_worktree| local_worktree.abs_path())
2339 {
2340 worktree_abs_path
2341 } else {
2342 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2343 };
2344 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2345 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2346 uri
2347 } else {
2348 return Task::ready(Err(anyhow!("invalid symbol path")));
2349 };
2350
2351 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2352 } else if let Some(project_id) = self.remote_id() {
2353 let request = self.client.request(proto::OpenBufferForSymbol {
2354 project_id,
2355 symbol: Some(serialize_symbol(symbol)),
2356 });
2357 cx.spawn(|this, mut cx| async move {
2358 let response = request.await?;
2359 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2360 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2361 .await
2362 })
2363 } else {
2364 Task::ready(Err(anyhow!("project does not have a remote id")))
2365 }
2366 }
2367
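    /// Requests completions at the given position. For local buffers the request
    /// goes to the buffer's language server and the returned items are converted
    /// into anchored `Completion`s; for remote buffers the host is asked over RPC.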
2368 pub fn completions<T: ToPointUtf16>(
2369 &self,
2370 source_buffer_handle: &ModelHandle<Buffer>,
2371 position: T,
2372 cx: &mut ModelContext<Self>,
2373 ) -> Task<Result<Vec<Completion>>> {
2374 let source_buffer_handle = source_buffer_handle.clone();
2375 let source_buffer = source_buffer_handle.read(cx);
2376 let buffer_id = source_buffer.remote_id();
2377 let language = source_buffer.language().cloned();
2378 let worktree;
2379 let buffer_abs_path;
2380 if let Some(file) = File::from_dyn(source_buffer.file()) {
2381 worktree = file.worktree.clone();
2382 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2383 } else {
2384 return Task::ready(Ok(Default::default()));
2385 };
2386
2387 let position = position.to_point_utf16(source_buffer);
2388 let anchor = source_buffer.anchor_after(position);
2389
2390 if worktree.read(cx).as_local().is_some() {
2391 let buffer_abs_path = buffer_abs_path.unwrap();
2392 let (_, lang_server) =
2393 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2394 server.clone()
2395 } else {
2396 return Task::ready(Ok(Default::default()));
2397 };
2398
2399 cx.spawn(|_, cx| async move {
2400 let clipped_position = source_buffer_handle
2401 .read_with(&cx, |this, _| this.clip_point_utf16(position, Bias::Left));
2402 if clipped_position != position {
2403 log::info!("Completion position out of date");
2404 return Ok(Default::default());
2405 }
2406
2407 let completions = lang_server
2408 .request::<lsp::request::Completion>(lsp::CompletionParams {
2409 text_document_position: lsp::TextDocumentPositionParams::new(
2410 lsp::TextDocumentIdentifier::new(
2411 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2412 ),
2413 point_to_lsp(position),
2414 ),
2415 context: Default::default(),
2416 work_done_progress_params: Default::default(),
2417 partial_result_params: Default::default(),
2418 })
2419 .await
2420 .context("lsp completion request failed")?;
2421
2422 let completions = if let Some(completions) = completions {
2423 match completions {
2424 lsp::CompletionResponse::Array(completions) => completions,
2425 lsp::CompletionResponse::List(list) => list.items,
2426 }
2427 } else {
2428 Default::default()
2429 };
2430
2431 source_buffer_handle.read_with(&cx, |this, _| {
2432 Ok(completions
2433 .into_iter()
2434 .filter_map(|lsp_completion| {
2435 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2436 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2437 (range_from_lsp(edit.range), edit.new_text.clone())
2438 }
2439 None => (
2440 this.common_prefix_at(position, &lsp_completion.label),
2441 lsp_completion.label.clone(),
2442 ),
2443 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2444 log::info!("unsupported insert/replace completion");
2445 return None;
2446 }
2447 };
2448
2449 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2450 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2451 if clipped_start == old_range.start && clipped_end == old_range.end {
2452 Some(Completion {
2453 old_range: this.anchor_before(old_range.start)
2454 ..this.anchor_after(old_range.end),
2455 new_text,
2456 label: language
2457 .as_ref()
2458 .and_then(|l| l.label_for_completion(&lsp_completion))
2459 .unwrap_or_else(|| {
2460 CodeLabel::plain(
2461 lsp_completion.label.clone(),
2462 lsp_completion.filter_text.as_deref(),
2463 )
2464 }),
2465 lsp_completion,
2466 })
2467 } else {
2468 log::info!("completion out of expected range");
2469 None
2470 }
2471 })
2472 .collect())
2473 })
2474 })
2475 } else if let Some(project_id) = self.remote_id() {
2476 let rpc = self.client.clone();
2477 let message = proto::GetCompletions {
2478 project_id,
2479 buffer_id,
2480 position: Some(language::proto::serialize_anchor(&anchor)),
2481 version: serialize_version(&source_buffer.version()),
2482 };
2483 cx.spawn_weak(|_, mut cx| async move {
2484 let response = rpc.request(message).await?;
2485
2486 source_buffer_handle
2487 .update(&mut cx, |buffer, _| {
2488 buffer.wait_for_version(deserialize_version(response.version))
2489 })
2490 .await;
2491
2492 response
2493 .completions
2494 .into_iter()
2495 .map(|completion| {
2496 language::proto::deserialize_completion(completion, language.as_ref())
2497 })
2498 .collect()
2499 })
2500 } else {
2501 Task::ready(Ok(Default::default()))
2502 }
2503 }
2504
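    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the transaction that was applied, if any.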
2505 pub fn apply_additional_edits_for_completion(
2506 &self,
2507 buffer_handle: ModelHandle<Buffer>,
2508 completion: Completion,
2509 push_to_history: bool,
2510 cx: &mut ModelContext<Self>,
2511 ) -> Task<Result<Option<Transaction>>> {
2512 let buffer = buffer_handle.read(cx);
2513 let buffer_id = buffer.remote_id();
2514
2515 if self.is_local() {
2516 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2517 {
2518 server.clone()
2519 } else {
2520 return Task::ready(Ok(Default::default()));
2521 };
2522
2523 cx.spawn(|this, mut cx| async move {
2524 let resolved_completion = lang_server
2525 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2526 .await?;
2527 if let Some(edits) = resolved_completion.additional_text_edits {
2528 let edits = this
2529 .update(&mut cx, |this, cx| {
2530 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2531 })
2532 .await?;
2533 buffer_handle.update(&mut cx, |buffer, cx| {
2534 buffer.finalize_last_transaction();
2535 buffer.start_transaction();
2536 for (range, text) in edits {
2537 buffer.edit([range], text, cx);
2538 }
2539 let transaction = if buffer.end_transaction(cx).is_some() {
2540 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2541 if !push_to_history {
2542 buffer.forget_transaction(transaction.id);
2543 }
2544 Some(transaction)
2545 } else {
2546 None
2547 };
2548 Ok(transaction)
2549 })
2550 } else {
2551 Ok(None)
2552 }
2553 })
2554 } else if let Some(project_id) = self.remote_id() {
2555 let client = self.client.clone();
2556 cx.spawn(|_, mut cx| async move {
2557 let response = client
2558 .request(proto::ApplyCompletionAdditionalEdits {
2559 project_id,
2560 buffer_id,
2561 completion: Some(language::proto::serialize_completion(&completion)),
2562 })
2563 .await?;
2564
2565 if let Some(transaction) = response.transaction {
2566 let transaction = language::proto::deserialize_transaction(transaction)?;
2567 buffer_handle
2568 .update(&mut cx, |buffer, _| {
2569 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2570 })
2571 .await;
2572 if push_to_history {
2573 buffer_handle.update(&mut cx, |buffer, _| {
2574 buffer.push_transaction(transaction.clone(), Instant::now());
2575 });
2576 }
2577 Ok(Some(transaction))
2578 } else {
2579 Ok(None)
2580 }
2581 })
2582 } else {
2583 Task::ready(Err(anyhow!("project does not have a remote id")))
2584 }
2585 }
2586
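    /// Requests the code actions applicable to the given range, passing along the
    /// diagnostics that overlap it, from the buffer's language server or from the
    /// host for remote projects.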
2587 pub fn code_actions<T: Clone + ToOffset>(
2588 &self,
2589 buffer_handle: &ModelHandle<Buffer>,
2590 range: Range<T>,
2591 cx: &mut ModelContext<Self>,
2592 ) -> Task<Result<Vec<CodeAction>>> {
2593 let buffer_handle = buffer_handle.clone();
2594 let buffer = buffer_handle.read(cx);
2595 let snapshot = buffer.snapshot();
2596 let relevant_diagnostics = snapshot
2597 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2598 .map(|entry| entry.to_lsp_diagnostic_stub())
2599 .collect();
2600 let buffer_id = buffer.remote_id();
2601 let worktree;
2602 let buffer_abs_path;
2603 if let Some(file) = File::from_dyn(buffer.file()) {
2604 worktree = file.worktree.clone();
2605 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2606 } else {
2607 return Task::ready(Ok(Default::default()));
2608 };
2609 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2610
2611 if worktree.read(cx).as_local().is_some() {
2612 let buffer_abs_path = buffer_abs_path.unwrap();
2613 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2614 {
2615 server.clone()
2616 } else {
2617 return Task::ready(Ok(Default::default()));
2618 };
2619
2620 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2621 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2623 return Ok(Default::default());
2624 }
2625
2626 Ok(lang_server
2627 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2628 text_document: lsp::TextDocumentIdentifier::new(
2629 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2630 ),
2631 range: lsp_range,
2632 work_done_progress_params: Default::default(),
2633 partial_result_params: Default::default(),
2634 context: lsp::CodeActionContext {
2635 diagnostics: relevant_diagnostics,
2636 only: Some(vec![
2637 lsp::CodeActionKind::QUICKFIX,
2638 lsp::CodeActionKind::REFACTOR,
2639 lsp::CodeActionKind::REFACTOR_EXTRACT,
2640 lsp::CodeActionKind::SOURCE,
2641 ]),
2642 },
2643 })
2644 .await?
2645 .unwrap_or_default()
2646 .into_iter()
2647 .filter_map(|entry| {
2648 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2649 Some(CodeAction {
2650 range: range.clone(),
2651 lsp_action,
2652 })
2653 } else {
2654 None
2655 }
2656 })
2657 .collect())
2658 })
2659 } else if let Some(project_id) = self.remote_id() {
2660 let rpc = self.client.clone();
2661 let version = buffer.version();
2662 cx.spawn_weak(|_, mut cx| async move {
2663 let response = rpc
2664 .request(proto::GetCodeActions {
2665 project_id,
2666 buffer_id,
2667 start: Some(language::proto::serialize_anchor(&range.start)),
2668 end: Some(language::proto::serialize_anchor(&range.end)),
2669 version: serialize_version(&version),
2670 })
2671 .await?;
2672
2673 buffer_handle
2674 .update(&mut cx, |buffer, _| {
2675 buffer.wait_for_version(deserialize_version(response.version))
2676 })
2677 .await;
2678
2679 response
2680 .actions
2681 .into_iter()
2682 .map(language::proto::deserialize_code_action)
2683 .collect()
2684 })
2685 } else {
2686 Task::ready(Ok(Default::default()))
2687 }
2688 }
2689
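    /// Applies a code action: the action is re-resolved (or re-requested) if
    /// necessary, its workspace edit is applied to the affected buffers, and any
    /// command it carries is executed by the language server.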
2690 pub fn apply_code_action(
2691 &self,
2692 buffer_handle: ModelHandle<Buffer>,
2693 mut action: CodeAction,
2694 push_to_history: bool,
2695 cx: &mut ModelContext<Self>,
2696 ) -> Task<Result<ProjectTransaction>> {
2697 if self.is_local() {
2698 let buffer = buffer_handle.read(cx);
2699 let (lsp_adapter, lang_server) =
2700 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2701 server.clone()
2702 } else {
2703 return Task::ready(Ok(Default::default()));
2704 };
2705 let range = action.range.to_point_utf16(buffer);
2706
2707 cx.spawn(|this, mut cx| async move {
2708 if let Some(lsp_range) = action
2709 .lsp_action
2710 .data
2711 .as_mut()
2712 .and_then(|d| d.get_mut("codeActionParams"))
2713 .and_then(|d| d.get_mut("range"))
2714 {
2715 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2716 action.lsp_action = lang_server
2717 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2718 .await?;
2719 } else {
2720 let actions = this
2721 .update(&mut cx, |this, cx| {
2722 this.code_actions(&buffer_handle, action.range, cx)
2723 })
2724 .await?;
2725 action.lsp_action = actions
2726 .into_iter()
2727 .find(|a| a.lsp_action.title == action.lsp_action.title)
2728 .ok_or_else(|| anyhow!("code action is outdated"))?
2729 .lsp_action;
2730 }
2731
2732 if let Some(edit) = action.lsp_action.edit {
2733 Self::deserialize_workspace_edit(
2734 this,
2735 edit,
2736 push_to_history,
2737 lsp_adapter,
2738 lang_server,
2739 &mut cx,
2740 )
2741 .await
2742 } else if let Some(command) = action.lsp_action.command {
2743 this.update(&mut cx, |this, _| this.start_code_action());
2744 lang_server
2745 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2746 command: command.command,
2747 arguments: command.arguments.unwrap_or_default(),
2748 ..Default::default()
2749 })
2750 .await?;
2751 Ok(this.update(&mut cx, |this, cx| this.finish_code_action(cx)))
2752 } else {
2753 Ok(ProjectTransaction::default())
2754 }
2755 })
2756 } else if let Some(project_id) = self.remote_id() {
2757 let client = self.client.clone();
2758 let request = proto::ApplyCodeAction {
2759 project_id,
2760 buffer_id: buffer_handle.read(cx).remote_id(),
2761 action: Some(language::proto::serialize_code_action(&action)),
2762 };
2763 cx.spawn(|this, mut cx| async move {
2764 let response = client
2765 .request(request)
2766 .await?
2767 .transaction
2768 .ok_or_else(|| anyhow!("missing transaction"))?;
2769 this.update(&mut cx, |this, cx| {
2770 this.deserialize_project_transaction(response, push_to_history, cx)
2771 })
2772 .await
2773 })
2774 } else {
2775 Task::ready(Err(anyhow!("project does not have a remote id")))
2776 }
2777 }
2778
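    /// Applies an LSP `WorkspaceEdit` to the project: resource operations
    /// (create, rename, delete) are performed on the file system, and text
    /// document edits are applied to the corresponding buffers, producing a
    /// `ProjectTransaction`.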
2779 async fn deserialize_workspace_edit(
2780 this: ModelHandle<Self>,
2781 edit: lsp::WorkspaceEdit,
2782 push_to_history: bool,
2783 lsp_adapter: Arc<dyn LspAdapter>,
2784 language_server: Arc<LanguageServer>,
2785 cx: &mut AsyncAppContext,
2786 ) -> Result<ProjectTransaction> {
2787 let fs = this.read_with(cx, |this, _| this.fs.clone());
2788 let mut operations = Vec::new();
2789 if let Some(document_changes) = edit.document_changes {
2790 match document_changes {
2791 lsp::DocumentChanges::Edits(edits) => {
2792 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2793 }
2794 lsp::DocumentChanges::Operations(ops) => operations = ops,
2795 }
2796 } else if let Some(changes) = edit.changes {
2797 operations.extend(changes.into_iter().map(|(uri, edits)| {
2798 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2799 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2800 uri,
2801 version: None,
2802 },
2803 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2804 })
2805 }));
2806 }
2807
2808 let mut project_transaction = ProjectTransaction::default();
2809 for operation in operations {
2810 match operation {
2811 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2812 let abs_path = op
2813 .uri
2814 .to_file_path()
2815 .map_err(|_| anyhow!("can't convert URI to path"))?;
2816
2817 if let Some(parent_path) = abs_path.parent() {
2818 fs.create_dir(parent_path).await?;
2819 }
2820 if abs_path.ends_with("/") {
2821 fs.create_dir(&abs_path).await?;
2822 } else {
2823 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2824 .await?;
2825 }
2826 }
2827 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2828 let source_abs_path = op
2829 .old_uri
2830 .to_file_path()
2831 .map_err(|_| anyhow!("can't convert URI to path"))?;
2832 let target_abs_path = op
2833 .new_uri
2834 .to_file_path()
2835 .map_err(|_| anyhow!("can't convert URI to path"))?;
2836 fs.rename(
2837 &source_abs_path,
2838 &target_abs_path,
2839 op.options.map(Into::into).unwrap_or_default(),
2840 )
2841 .await?;
2842 }
2843 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2844 let abs_path = op
2845 .uri
2846 .to_file_path()
2847 .map_err(|_| anyhow!("can't convert URI to path"))?;
2848 let options = op.options.map(Into::into).unwrap_or_default();
2849 if abs_path.ends_with("/") {
2850 fs.remove_dir(&abs_path, options).await?;
2851 } else {
2852 fs.remove_file(&abs_path, options).await?;
2853 }
2854 }
2855 lsp::DocumentChangeOperation::Edit(op) => {
2856 let buffer_to_edit = this
2857 .update(cx, |this, cx| {
2858 this.open_local_buffer_via_lsp(
2859 op.text_document.uri,
2860 lsp_adapter.clone(),
2861 language_server.clone(),
2862 cx,
2863 )
2864 })
2865 .await?;
2866
2867 let edits = this
2868 .update(cx, |this, cx| {
2869 let edits = op.edits.into_iter().map(|edit| match edit {
2870 lsp::OneOf::Left(edit) => edit,
2871 lsp::OneOf::Right(edit) => edit.text_edit,
2872 });
2873 this.edits_from_lsp(
2874 &buffer_to_edit,
2875 edits,
2876 op.text_document.version,
2877 cx,
2878 )
2879 })
2880 .await?;
2881
2882 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2883 buffer.finalize_last_transaction();
2884 buffer.start_transaction();
2885 for (range, text) in edits {
2886 buffer.edit([range], text, cx);
2887 }
2888 let transaction = if buffer.end_transaction(cx).is_some() {
2889 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2890 if !push_to_history {
2891 buffer.forget_transaction(transaction.id);
2892 }
2893 Some(transaction)
2894 } else {
2895 None
2896 };
2897
2898 transaction
2899 });
2900 if let Some(transaction) = transaction {
2901 project_transaction.0.insert(buffer_to_edit, transaction);
2902 }
2903 }
2904 }
2905 }
2906
2907 Ok(project_transaction)
2908 }
2909
2910 fn start_code_action(&mut self) {
2911 // Set some state that will be read inside of `on_lsp_event` when handling a `WorkspaceEdit`
2912 // event, and will cause the `ProjectTransaction` to be stored.
2913 }
2914
    fn finish_code_action(&mut self, _cx: &mut ModelContext<Self>) -> ProjectTransaction {
        // Retrieve all `ProjectTransaction`s that have been stored since
        // `start_code_action` was called, and combine them.
2918 Default::default()
2919 }
2920
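    /// Returns the range of the symbol that would be renamed at the given
    /// position, if the language server supports renaming there.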
2921 pub fn prepare_rename<T: ToPointUtf16>(
2922 &self,
2923 buffer: ModelHandle<Buffer>,
2924 position: T,
2925 cx: &mut ModelContext<Self>,
2926 ) -> Task<Result<Option<Range<Anchor>>>> {
2927 let position = position.to_point_utf16(buffer.read(cx));
2928 self.request_lsp(buffer, PrepareRename { position }, cx)
2929 }
2930
2931 pub fn perform_rename<T: ToPointUtf16>(
2932 &self,
2933 buffer: ModelHandle<Buffer>,
2934 position: T,
2935 new_name: String,
2936 push_to_history: bool,
2937 cx: &mut ModelContext<Self>,
2938 ) -> Task<Result<ProjectTransaction>> {
2939 let position = position.to_point_utf16(buffer.read(cx));
2940 self.request_lsp(
2941 buffer,
2942 PerformRename {
2943 position,
2944 new_name,
2945 push_to_history,
2946 },
2947 cx,
2948 )
2949 }
2950
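    /// Searches the project for the given query. Locally, visible worktrees are
    /// scanned in parallel for matching files, the matching buffers are opened,
    /// and match ranges are collected per buffer; for remote projects the search
    /// is delegated to the host.
    ///
    /// A rough usage sketch (the `project` handle and `query` are illustrative):
    ///
    /// ```ignore
    /// let results = project.update(cx, |project, cx| project.search(query, cx));
    /// for (buffer, ranges) in results.await? {
    ///     // highlight `ranges` within `buffer`...
    /// }
    /// ```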
2951 pub fn search(
2952 &self,
2953 query: SearchQuery,
2954 cx: &mut ModelContext<Self>,
2955 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2956 if self.is_local() {
2957 let snapshots = self
2958 .visible_worktrees(cx)
2959 .filter_map(|tree| {
2960 let tree = tree.read(cx).as_local()?;
2961 Some(tree.snapshot())
2962 })
2963 .collect::<Vec<_>>();
2964
2965 let background = cx.background().clone();
2966 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2967 if path_count == 0 {
2968 return Task::ready(Ok(Default::default()));
2969 }
2970 let workers = background.num_cpus().min(path_count);
2971 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2972 cx.background()
2973 .spawn({
2974 let fs = self.fs.clone();
2975 let background = cx.background().clone();
2976 let query = query.clone();
2977 async move {
2978 let fs = &fs;
2979 let query = &query;
2980 let matching_paths_tx = &matching_paths_tx;
2981 let paths_per_worker = (path_count + workers - 1) / workers;
2982 let snapshots = &snapshots;
2983 background
2984 .scoped(|scope| {
2985 for worker_ix in 0..workers {
2986 let worker_start_ix = worker_ix * paths_per_worker;
2987 let worker_end_ix = worker_start_ix + paths_per_worker;
2988 scope.spawn(async move {
2989 let mut snapshot_start_ix = 0;
2990 let mut abs_path = PathBuf::new();
2991 for snapshot in snapshots {
2992 let snapshot_end_ix =
2993 snapshot_start_ix + snapshot.visible_file_count();
2994 if worker_end_ix <= snapshot_start_ix {
2995 break;
2996 } else if worker_start_ix > snapshot_end_ix {
2997 snapshot_start_ix = snapshot_end_ix;
2998 continue;
2999 } else {
3000 let start_in_snapshot = worker_start_ix
3001 .saturating_sub(snapshot_start_ix);
3002 let end_in_snapshot =
3003 cmp::min(worker_end_ix, snapshot_end_ix)
3004 - snapshot_start_ix;
3005
3006 for entry in snapshot
3007 .files(false, start_in_snapshot)
3008 .take(end_in_snapshot - start_in_snapshot)
3009 {
3010 if matching_paths_tx.is_closed() {
3011 break;
3012 }
3013
3014 abs_path.clear();
3015 abs_path.push(&snapshot.abs_path());
3016 abs_path.push(&entry.path);
3017 let matches = if let Some(file) =
3018 fs.open_sync(&abs_path).await.log_err()
3019 {
3020 query.detect(file).unwrap_or(false)
3021 } else {
3022 false
3023 };
3024
3025 if matches {
3026 let project_path =
3027 (snapshot.id(), entry.path.clone());
3028 if matching_paths_tx
3029 .send(project_path)
3030 .await
3031 .is_err()
3032 {
3033 break;
3034 }
3035 }
3036 }
3037
3038 snapshot_start_ix = snapshot_end_ix;
3039 }
3040 }
3041 });
3042 }
3043 })
3044 .await;
3045 }
3046 })
3047 .detach();
3048
3049 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3050 let open_buffers = self
3051 .opened_buffers
3052 .values()
3053 .filter_map(|b| b.upgrade(cx))
3054 .collect::<HashSet<_>>();
3055 cx.spawn(|this, cx| async move {
3056 for buffer in &open_buffers {
3057 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3058 buffers_tx.send((buffer.clone(), snapshot)).await?;
3059 }
3060
3061 let open_buffers = Rc::new(RefCell::new(open_buffers));
3062 while let Some(project_path) = matching_paths_rx.next().await {
3063 if buffers_tx.is_closed() {
3064 break;
3065 }
3066
3067 let this = this.clone();
3068 let open_buffers = open_buffers.clone();
3069 let buffers_tx = buffers_tx.clone();
3070 cx.spawn(|mut cx| async move {
3071 if let Some(buffer) = this
3072 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3073 .await
3074 .log_err()
3075 {
3076 if open_buffers.borrow_mut().insert(buffer.clone()) {
3077 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3078 buffers_tx.send((buffer, snapshot)).await?;
3079 }
3080 }
3081
3082 Ok::<_, anyhow::Error>(())
3083 })
3084 .detach();
3085 }
3086
3087 Ok::<_, anyhow::Error>(())
3088 })
3089 .detach_and_log_err(cx);
3090
3091 let background = cx.background().clone();
3092 cx.background().spawn(async move {
3093 let query = &query;
3094 let mut matched_buffers = Vec::new();
3095 for _ in 0..workers {
3096 matched_buffers.push(HashMap::default());
3097 }
3098 background
3099 .scoped(|scope| {
3100 for worker_matched_buffers in matched_buffers.iter_mut() {
3101 let mut buffers_rx = buffers_rx.clone();
3102 scope.spawn(async move {
3103 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3104 let buffer_matches = query
3105 .search(snapshot.as_rope())
3106 .await
3107 .iter()
3108 .map(|range| {
3109 snapshot.anchor_before(range.start)
3110 ..snapshot.anchor_after(range.end)
3111 })
3112 .collect::<Vec<_>>();
3113 if !buffer_matches.is_empty() {
3114 worker_matched_buffers
3115 .insert(buffer.clone(), buffer_matches);
3116 }
3117 }
3118 });
3119 }
3120 })
3121 .await;
3122 Ok(matched_buffers.into_iter().flatten().collect())
3123 })
3124 } else if let Some(project_id) = self.remote_id() {
3125 let request = self.client.request(query.to_proto(project_id));
3126 cx.spawn(|this, mut cx| async move {
3127 let response = request.await?;
3128 let mut result = HashMap::default();
3129 for location in response.locations {
3130 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3131 let target_buffer = this
3132 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3133 .await?;
3134 let start = location
3135 .start
3136 .and_then(deserialize_anchor)
3137 .ok_or_else(|| anyhow!("missing target start"))?;
3138 let end = location
3139 .end
3140 .and_then(deserialize_anchor)
3141 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
3146 }
3147 Ok(result)
3148 })
3149 } else {
3150 Task::ready(Ok(Default::default()))
3151 }
3152 }
3153
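    /// Dispatches a typed LSP request for the given buffer: locally the request
    /// is sent to the buffer's language server after checking its capabilities;
    /// for remote projects it is forwarded to the host as a proto message.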
3154 fn request_lsp<R: LspCommand>(
3155 &self,
3156 buffer_handle: ModelHandle<Buffer>,
3157 request: R,
3158 cx: &mut ModelContext<Self>,
3159 ) -> Task<Result<R::Response>>
3160 where
3161 <R::LspRequest as lsp::request::Request>::Result: Send,
3162 {
3163 let buffer = buffer_handle.read(cx);
3164 if self.is_local() {
3165 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3166 if let Some((file, (_, language_server))) =
3167 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3168 {
3169 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3170 return cx.spawn(|this, cx| async move {
3171 if !request.check_capabilities(&language_server.capabilities()) {
3172 return Ok(Default::default());
3173 }
3174
3175 let response = language_server
3176 .request::<R::LspRequest>(lsp_params)
3177 .await
3178 .context("lsp request failed")?;
3179 request
3180 .response_from_lsp(response, this, buffer_handle, cx)
3181 .await
3182 });
3183 }
3184 } else if let Some(project_id) = self.remote_id() {
3185 let rpc = self.client.clone();
3186 let message = request.to_proto(project_id, buffer);
3187 return cx.spawn(|this, cx| async move {
3188 let response = rpc.request(message).await?;
3189 request
3190 .response_from_proto(response, this, buffer_handle, cx)
3191 .await
3192 });
3193 }
3194 Task::ready(Ok(Default::default()))
3195 }
3196
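    /// Returns the local worktree containing `abs_path`, along with the path
    /// relative to that worktree's root, creating a new worktree if none of the
    /// existing ones contains the path.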
3197 pub fn find_or_create_local_worktree(
3198 &mut self,
3199 abs_path: impl AsRef<Path>,
3200 visible: bool,
3201 cx: &mut ModelContext<Self>,
3202 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3203 let abs_path = abs_path.as_ref();
3204 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3205 Task::ready(Ok((tree.clone(), relative_path.into())))
3206 } else {
3207 let worktree = self.create_local_worktree(abs_path, visible, cx);
3208 cx.foreground()
3209 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3210 }
3211 }
3212
3213 pub fn find_local_worktree(
3214 &self,
3215 abs_path: &Path,
3216 cx: &AppContext,
3217 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3218 for tree in self.worktrees(cx) {
3219 if let Some(relative_path) = tree
3220 .read(cx)
3221 .as_local()
3222 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3223 {
3224 return Some((tree.clone(), relative_path.into()));
3225 }
3226 }
3227 None
3228 }
3229
3230 pub fn is_shared(&self) -> bool {
3231 match &self.client_state {
3232 ProjectClientState::Local { is_shared, .. } => *is_shared,
3233 ProjectClientState::Remote { .. } => false,
3234 }
3235 }
3236
3237 fn create_local_worktree(
3238 &mut self,
3239 abs_path: impl AsRef<Path>,
3240 visible: bool,
3241 cx: &mut ModelContext<Self>,
3242 ) -> Task<Result<ModelHandle<Worktree>>> {
3243 let fs = self.fs.clone();
3244 let client = self.client.clone();
3245 let next_entry_id = self.next_entry_id.clone();
3246 let path: Arc<Path> = abs_path.as_ref().into();
3247 let task = self
3248 .loading_local_worktrees
3249 .entry(path.clone())
3250 .or_insert_with(|| {
3251 cx.spawn(|project, mut cx| {
3252 async move {
3253 let worktree = Worktree::local(
3254 client.clone(),
3255 path.clone(),
3256 visible,
3257 fs,
3258 next_entry_id,
3259 &mut cx,
3260 )
3261 .await;
3262 project.update(&mut cx, |project, _| {
3263 project.loading_local_worktrees.remove(&path);
3264 });
3265 let worktree = worktree?;
3266
3267 let (remote_project_id, is_shared) =
3268 project.update(&mut cx, |project, cx| {
3269 project.add_worktree(&worktree, cx);
3270 (project.remote_id(), project.is_shared())
3271 });
3272
3273 if let Some(project_id) = remote_project_id {
3274 if is_shared {
3275 worktree
3276 .update(&mut cx, |worktree, cx| {
3277 worktree.as_local_mut().unwrap().share(project_id, cx)
3278 })
3279 .await?;
3280 } else {
3281 worktree
3282 .update(&mut cx, |worktree, cx| {
3283 worktree.as_local_mut().unwrap().register(project_id, cx)
3284 })
3285 .await?;
3286 }
3287 }
3288
3289 Ok(worktree)
3290 }
                    .map_err(Arc::new)
3292 })
3293 .shared()
3294 })
3295 .clone();
3296 cx.foreground().spawn(async move {
3297 match task.await {
3298 Ok(worktree) => Ok(worktree),
3299 Err(err) => Err(anyhow!("{}", err)),
3300 }
3301 })
3302 }
3303
3304 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3305 self.worktrees.retain(|worktree| {
3306 worktree
3307 .upgrade(cx)
3308 .map_or(false, |w| w.read(cx).id() != id)
3309 });
3310 cx.notify();
3311 }
3312
3313 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3314 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3315 if worktree.read(cx).is_local() {
3316 cx.subscribe(&worktree, |this, worktree, _, cx| {
3317 this.update_local_worktree_buffers(worktree, cx);
3318 })
3319 .detach();
3320 }
3321
3322 let push_strong_handle = {
3323 let worktree = worktree.read(cx);
3324 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3325 };
3326 if push_strong_handle {
3327 self.worktrees
3328 .push(WorktreeHandle::Strong(worktree.clone()));
3329 } else {
3330 cx.observe_release(&worktree, |this, _, cx| {
3331 this.worktrees
3332 .retain(|worktree| worktree.upgrade(cx).is_some());
3333 cx.notify();
3334 })
3335 .detach();
3336 self.worktrees
3337 .push(WorktreeHandle::Weak(worktree.downgrade()));
3338 }
3339 cx.notify();
3340 }
3341
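    /// When a local worktree's snapshot changes, refreshes the `File` attached to
    /// every open buffer in that worktree (following renames and deletions) and
    /// notifies remote collaborators of the updated file metadata.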
3342 fn update_local_worktree_buffers(
3343 &mut self,
3344 worktree_handle: ModelHandle<Worktree>,
3345 cx: &mut ModelContext<Self>,
3346 ) {
3347 let snapshot = worktree_handle.read(cx).snapshot();
3348 let mut buffers_to_delete = Vec::new();
3349 for (buffer_id, buffer) in &self.opened_buffers {
3350 if let Some(buffer) = buffer.upgrade(cx) {
3351 buffer.update(cx, |buffer, cx| {
3352 if let Some(old_file) = File::from_dyn(buffer.file()) {
3353 if old_file.worktree != worktree_handle {
3354 return;
3355 }
3356
3357 let new_file = if let Some(entry) = old_file
3358 .entry_id
3359 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3360 {
3361 File {
3362 is_local: true,
3363 entry_id: Some(entry.id),
3364 mtime: entry.mtime,
3365 path: entry.path.clone(),
3366 worktree: worktree_handle.clone(),
3367 }
3368 } else if let Some(entry) =
3369 snapshot.entry_for_path(old_file.path().as_ref())
3370 {
3371 File {
3372 is_local: true,
3373 entry_id: Some(entry.id),
3374 mtime: entry.mtime,
3375 path: entry.path.clone(),
3376 worktree: worktree_handle.clone(),
3377 }
3378 } else {
3379 File {
3380 is_local: true,
3381 entry_id: None,
3382 path: old_file.path().clone(),
3383 mtime: old_file.mtime(),
3384 worktree: worktree_handle.clone(),
3385 }
3386 };
3387
3388 if let Some(project_id) = self.remote_id() {
3389 self.client
3390 .send(proto::UpdateBufferFile {
3391 project_id,
3392 buffer_id: *buffer_id as u64,
3393 file: Some(new_file.to_proto()),
3394 })
3395 .log_err();
3396 }
3397 buffer.file_updated(Box::new(new_file), cx).detach();
3398 }
3399 });
3400 } else {
3401 buffers_to_delete.push(*buffer_id);
3402 }
3403 }
3404
3405 for buffer_id in buffers_to_delete {
3406 self.opened_buffers.remove(&buffer_id);
3407 }
3408 }
3409
3410 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3411 let new_active_entry = entry.and_then(|project_path| {
3412 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3413 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3414 Some(entry.id)
3415 });
3416 if new_active_entry != self.active_entry {
3417 self.active_entry = new_active_entry;
3418 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3419 }
3420 }
3421
3422 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3423 self.language_servers_with_diagnostics_running > 0
3424 }
3425
3426 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3427 let mut summary = DiagnosticSummary::default();
3428 for (_, path_summary) in self.diagnostic_summaries(cx) {
3429 summary.error_count += path_summary.error_count;
3430 summary.warning_count += path_summary.warning_count;
3431 summary.info_count += path_summary.info_count;
3432 summary.hint_count += path_summary.hint_count;
3433 }
3434 summary
3435 }
3436
3437 pub fn diagnostic_summaries<'a>(
3438 &'a self,
3439 cx: &'a AppContext,
3440 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3441 self.worktrees(cx).flat_map(move |worktree| {
3442 let worktree = worktree.read(cx);
3443 let worktree_id = worktree.id();
3444 worktree
3445 .diagnostic_summaries()
3446 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3447 })
3448 }
3449
3450 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3451 self.language_servers_with_diagnostics_running += 1;
3452 if self.language_servers_with_diagnostics_running == 1 {
3453 cx.emit(Event::DiskBasedDiagnosticsStarted);
3454 }
3455 }
3456
3457 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3458 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3459 self.language_servers_with_diagnostics_running -= 1;
3460 if self.language_servers_with_diagnostics_running == 0 {
3461 cx.emit(Event::DiskBasedDiagnosticsFinished);
3462 }
3463 }
3464
3465 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3466 self.active_entry
3467 }
3468
3469 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3470 self.worktree_for_id(path.worktree_id, cx)?
3471 .read(cx)
3472 .entry_for_path(&path.path)
3473 .map(|entry| entry.id)
3474 }
3475
3476 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3477 let worktree = self.worktree_for_entry(entry_id, cx)?;
3478 let worktree = worktree.read(cx);
3479 let worktree_id = worktree.id();
3480 let path = worktree.entry_for_id(entry_id)?.path.clone();
3481 Some(ProjectPath { worktree_id, path })
3482 }
3483
3484 // RPC message handlers
3485
3486 async fn handle_unshare_project(
3487 this: ModelHandle<Self>,
3488 _: TypedEnvelope<proto::UnshareProject>,
3489 _: Arc<Client>,
3490 mut cx: AsyncAppContext,
3491 ) -> Result<()> {
3492 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3493 Ok(())
3494 }
3495
3496 async fn handle_add_collaborator(
3497 this: ModelHandle<Self>,
3498 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3499 _: Arc<Client>,
3500 mut cx: AsyncAppContext,
3501 ) -> Result<()> {
3502 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3503 let collaborator = envelope
3504 .payload
3505 .collaborator
3506 .take()
3507 .ok_or_else(|| anyhow!("empty collaborator"))?;
3508
3509 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3510 this.update(&mut cx, |this, cx| {
3511 this.collaborators
3512 .insert(collaborator.peer_id, collaborator);
3513 cx.notify();
3514 });
3515
3516 Ok(())
3517 }
3518
3519 async fn handle_remove_collaborator(
3520 this: ModelHandle<Self>,
3521 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3522 _: Arc<Client>,
3523 mut cx: AsyncAppContext,
3524 ) -> Result<()> {
3525 this.update(&mut cx, |this, cx| {
3526 let peer_id = PeerId(envelope.payload.peer_id);
3527 let replica_id = this
3528 .collaborators
3529 .remove(&peer_id)
3530 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3531 .replica_id;
3532 for (_, buffer) in &this.opened_buffers {
3533 if let Some(buffer) = buffer.upgrade(cx) {
3534 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3535 }
3536 }
3537 cx.emit(Event::CollaboratorLeft(peer_id));
3538 cx.notify();
3539 Ok(())
3540 })
3541 }
3542
3543 async fn handle_register_worktree(
3544 this: ModelHandle<Self>,
3545 envelope: TypedEnvelope<proto::RegisterWorktree>,
3546 client: Arc<Client>,
3547 mut cx: AsyncAppContext,
3548 ) -> Result<()> {
3549 this.update(&mut cx, |this, cx| {
3550 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3551 let replica_id = this.replica_id();
3552 let worktree = proto::Worktree {
3553 id: envelope.payload.worktree_id,
3554 root_name: envelope.payload.root_name,
3555 entries: Default::default(),
3556 diagnostic_summaries: Default::default(),
3557 visible: envelope.payload.visible,
3558 };
3559 let (worktree, load_task) =
3560 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3561 this.add_worktree(&worktree, cx);
3562 load_task.detach();
3563 Ok(())
3564 })
3565 }
3566
3567 async fn handle_unregister_worktree(
3568 this: ModelHandle<Self>,
3569 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3570 _: Arc<Client>,
3571 mut cx: AsyncAppContext,
3572 ) -> Result<()> {
3573 this.update(&mut cx, |this, cx| {
3574 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3575 this.remove_worktree(worktree_id, cx);
3576 Ok(())
3577 })
3578 }
3579
3580 async fn handle_update_worktree(
3581 this: ModelHandle<Self>,
3582 envelope: TypedEnvelope<proto::UpdateWorktree>,
3583 _: Arc<Client>,
3584 mut cx: AsyncAppContext,
3585 ) -> Result<()> {
3586 this.update(&mut cx, |this, cx| {
3587 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3588 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3589 worktree.update(cx, |worktree, _| {
3590 let worktree = worktree.as_remote_mut().unwrap();
3591 worktree.update_from_remote(envelope)
3592 })?;
3593 }
3594 Ok(())
3595 })
3596 }
3597
3598 async fn handle_update_diagnostic_summary(
3599 this: ModelHandle<Self>,
3600 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3601 _: Arc<Client>,
3602 mut cx: AsyncAppContext,
3603 ) -> Result<()> {
3604 this.update(&mut cx, |this, cx| {
3605 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3606 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3607 if let Some(summary) = envelope.payload.summary {
3608 let project_path = ProjectPath {
3609 worktree_id,
3610 path: Path::new(&summary.path).into(),
3611 };
3612 worktree.update(cx, |worktree, _| {
3613 worktree
3614 .as_remote_mut()
3615 .unwrap()
3616 .update_diagnostic_summary(project_path.path.clone(), &summary);
3617 });
3618 cx.emit(Event::DiagnosticsUpdated(project_path));
3619 }
3620 }
3621 Ok(())
3622 })
3623 }
3624
3625 async fn handle_start_language_server(
3626 this: ModelHandle<Self>,
3627 envelope: TypedEnvelope<proto::StartLanguageServer>,
3628 _: Arc<Client>,
3629 mut cx: AsyncAppContext,
3630 ) -> Result<()> {
3631 let server = envelope
3632 .payload
3633 .server
3634 .ok_or_else(|| anyhow!("invalid server"))?;
3635 this.update(&mut cx, |this, cx| {
3636 this.language_server_statuses.insert(
3637 server.id as usize,
3638 LanguageServerStatus {
3639 name: server.name,
3640 pending_work: Default::default(),
3641 pending_diagnostic_updates: 0,
3642 },
3643 );
3644 cx.notify();
3645 });
3646 Ok(())
3647 }
3648
3649 async fn handle_update_language_server(
3650 this: ModelHandle<Self>,
3651 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3652 _: Arc<Client>,
3653 mut cx: AsyncAppContext,
3654 ) -> Result<()> {
3655 let language_server_id = envelope.payload.language_server_id as usize;
3656 match envelope
3657 .payload
3658 .variant
3659 .ok_or_else(|| anyhow!("invalid variant"))?
3660 {
3661 proto::update_language_server::Variant::WorkStart(payload) => {
3662 this.update(&mut cx, |this, cx| {
3663 this.on_lsp_work_start(language_server_id, payload.token, cx);
3664 })
3665 }
3666 proto::update_language_server::Variant::WorkProgress(payload) => {
3667 this.update(&mut cx, |this, cx| {
3668 this.on_lsp_work_progress(
3669 language_server_id,
3670 payload.token,
3671 LanguageServerProgress {
3672 message: payload.message,
3673 percentage: payload.percentage.map(|p| p as usize),
3674 last_update_at: Instant::now(),
3675 },
3676 cx,
3677 );
3678 })
3679 }
3680 proto::update_language_server::Variant::WorkEnd(payload) => {
3681 this.update(&mut cx, |this, cx| {
3682 this.on_lsp_work_end(language_server_id, payload.token, cx);
3683 })
3684 }
3685 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3686 this.update(&mut cx, |this, cx| {
3687 this.disk_based_diagnostics_started(cx);
3688 })
3689 }
3690 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3691 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3692 }
3693 }
3694
3695 Ok(())
3696 }
3697
3698 async fn handle_update_buffer(
3699 this: ModelHandle<Self>,
3700 envelope: TypedEnvelope<proto::UpdateBuffer>,
3701 _: Arc<Client>,
3702 mut cx: AsyncAppContext,
3703 ) -> Result<()> {
3704 this.update(&mut cx, |this, cx| {
3705 let payload = envelope.payload.clone();
3706 let buffer_id = payload.buffer_id;
3707 let ops = payload
3708 .operations
3709 .into_iter()
                .map(language::proto::deserialize_operation)
3711 .collect::<Result<Vec<_>, _>>()?;
3712 match this.opened_buffers.entry(buffer_id) {
3713 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3714 OpenBuffer::Strong(buffer) => {
3715 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3716 }
3717 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3718 OpenBuffer::Weak(_) => {}
3719 },
3720 hash_map::Entry::Vacant(e) => {
3721 e.insert(OpenBuffer::Loading(ops));
3722 }
3723 }
3724 Ok(())
3725 })
3726 }
3727
3728 async fn handle_update_buffer_file(
3729 this: ModelHandle<Self>,
3730 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3731 _: Arc<Client>,
3732 mut cx: AsyncAppContext,
3733 ) -> Result<()> {
3734 this.update(&mut cx, |this, cx| {
3735 let payload = envelope.payload.clone();
3736 let buffer_id = payload.buffer_id;
3737 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3738 let worktree = this
3739 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3740 .ok_or_else(|| anyhow!("no such worktree"))?;
3741 let file = File::from_proto(file, worktree.clone(), cx)?;
3742 let buffer = this
3743 .opened_buffers
3744 .get_mut(&buffer_id)
3745 .and_then(|b| b.upgrade(cx))
3746 .ok_or_else(|| anyhow!("no such buffer"))?;
3747 buffer.update(cx, |buffer, cx| {
3748 buffer.file_updated(Box::new(file), cx).detach();
3749 });
3750 Ok(())
3751 })
3752 }
3753
3754 async fn handle_save_buffer(
3755 this: ModelHandle<Self>,
3756 envelope: TypedEnvelope<proto::SaveBuffer>,
3757 _: Arc<Client>,
3758 mut cx: AsyncAppContext,
3759 ) -> Result<proto::BufferSaved> {
3760 let buffer_id = envelope.payload.buffer_id;
3761 let requested_version = deserialize_version(envelope.payload.version);
3762
3763 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3764 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3765 let buffer = this
3766 .opened_buffers
3767 .get(&buffer_id)
3768 .map(|buffer| buffer.upgrade(cx).unwrap())
3769 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3770 Ok::<_, anyhow::Error>((project_id, buffer))
3771 })?;
3772 buffer
3773 .update(&mut cx, |buffer, _| {
3774 buffer.wait_for_version(requested_version)
3775 })
3776 .await;
3777
3778 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3779 Ok(proto::BufferSaved {
3780 project_id,
3781 buffer_id,
3782 version: serialize_version(&saved_version),
3783 mtime: Some(mtime.into()),
3784 })
3785 }
3786
3787 async fn handle_format_buffers(
3788 this: ModelHandle<Self>,
3789 envelope: TypedEnvelope<proto::FormatBuffers>,
3790 _: Arc<Client>,
3791 mut cx: AsyncAppContext,
3792 ) -> Result<proto::FormatBuffersResponse> {
3793 let sender_id = envelope.original_sender_id()?;
3794 let format = this.update(&mut cx, |this, cx| {
3795 let mut buffers = HashSet::default();
3796 for buffer_id in &envelope.payload.buffer_ids {
3797 buffers.insert(
3798 this.opened_buffers
3799 .get(buffer_id)
3800 .map(|buffer| buffer.upgrade(cx).unwrap())
3801 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3802 );
3803 }
3804 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3805 })?;
3806
3807 let project_transaction = format.await?;
3808 let project_transaction = this.update(&mut cx, |this, cx| {
3809 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3810 });
3811 Ok(proto::FormatBuffersResponse {
3812 transaction: Some(project_transaction),
3813 })
3814 }
3815
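    // Handles a GetCompletions request: waits for the buffer to reach the sender's
    // version, queries completions at the given anchor, and returns them together with
    // the buffer version observed before the request was made.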
3816 async fn handle_get_completions(
3817 this: ModelHandle<Self>,
3818 envelope: TypedEnvelope<proto::GetCompletions>,
3819 _: Arc<Client>,
3820 mut cx: AsyncAppContext,
3821 ) -> Result<proto::GetCompletionsResponse> {
3822 let position = envelope
3823 .payload
3824 .position
3825 .and_then(language::proto::deserialize_anchor)
3826 .ok_or_else(|| anyhow!("invalid position"))?;
3827 let version = deserialize_version(envelope.payload.version);
3828 let buffer = this.read_with(&cx, |this, cx| {
3829 this.opened_buffers
3830 .get(&envelope.payload.buffer_id)
3831 .map(|buffer| buffer.upgrade(cx).unwrap())
3832 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3833 })?;
3834 buffer
3835 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3836 .await;
3837 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3838 let completions = this
3839 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3840 .await?;
3841
3842 Ok(proto::GetCompletionsResponse {
3843 completions: completions
3844 .iter()
3845 .map(language::proto::serialize_completion)
3846 .collect(),
3847 version: serialize_version(&version),
3848 })
3849 }
3850
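    // Handles a request to apply a completion's additional text edits, returning the
    // resulting transaction, if any, in serialized form.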
3851 async fn handle_apply_additional_edits_for_completion(
3852 this: ModelHandle<Self>,
3853 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3854 _: Arc<Client>,
3855 mut cx: AsyncAppContext,
3856 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3857 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3858 let buffer = this
3859 .opened_buffers
3860 .get(&envelope.payload.buffer_id)
3861 .map(|buffer| buffer.upgrade(cx).unwrap())
3862 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3863 let language = buffer.read(cx).language();
3864 let completion = language::proto::deserialize_completion(
3865 envelope
3866 .payload
3867 .completion
3868 .ok_or_else(|| anyhow!("invalid completion"))?,
3869 language,
3870 )?;
3871 Ok::<_, anyhow::Error>(
3872 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3873 )
3874 })?;
3875
3876 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3877 transaction: apply_additional_edits
3878 .await?
3879 .as_ref()
3880 .map(language::proto::serialize_transaction),
3881 })
3882 }
3883
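    // Handles a GetCodeActions request: waits for the buffer to reach the sender's
    // version, then returns the code actions available within the given anchor range.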
3884 async fn handle_get_code_actions(
3885 this: ModelHandle<Self>,
3886 envelope: TypedEnvelope<proto::GetCodeActions>,
3887 _: Arc<Client>,
3888 mut cx: AsyncAppContext,
3889 ) -> Result<proto::GetCodeActionsResponse> {
3890 let start = envelope
3891 .payload
3892 .start
3893 .and_then(language::proto::deserialize_anchor)
3894 .ok_or_else(|| anyhow!("invalid start"))?;
3895 let end = envelope
3896 .payload
3897 .end
3898 .and_then(language::proto::deserialize_anchor)
3899 .ok_or_else(|| anyhow!("invalid end"))?;
3900 let buffer = this.update(&mut cx, |this, cx| {
3901 this.opened_buffers
3902 .get(&envelope.payload.buffer_id)
3903 .map(|buffer| buffer.upgrade(cx).unwrap())
3904 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3905 })?;
3906 buffer
3907 .update(&mut cx, |buffer, _| {
3908 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3909 })
3910 .await;
3911
3912 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3913 let code_actions = this.update(&mut cx, |this, cx| {
3914 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3915 })?;
3916
3917 Ok(proto::GetCodeActionsResponse {
3918 actions: code_actions
3919 .await?
3920 .iter()
3921 .map(language::proto::serialize_code_action)
3922 .collect(),
3923 version: serialize_version(&version),
3924 })
3925 }
3926
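    // Handles an ApplyCodeAction request by applying the action locally and returning
    // the resulting project-wide transaction, serialized for the requesting peer.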
3927 async fn handle_apply_code_action(
3928 this: ModelHandle<Self>,
3929 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3930 _: Arc<Client>,
3931 mut cx: AsyncAppContext,
3932 ) -> Result<proto::ApplyCodeActionResponse> {
3933 let sender_id = envelope.original_sender_id()?;
3934 let action = language::proto::deserialize_code_action(
3935 envelope
3936 .payload
3937 .action
3938 .ok_or_else(|| anyhow!("invalid action"))?,
3939 )?;
3940 let apply_code_action = this.update(&mut cx, |this, cx| {
3941 let buffer = this
3942 .opened_buffers
3943 .get(&envelope.payload.buffer_id)
3944 .map(|buffer| buffer.upgrade(cx).unwrap())
3945 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3946 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3947 })?;
3948
3949 let project_transaction = apply_code_action.await?;
3950 let project_transaction = this.update(&mut cx, |this, cx| {
3951 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3952 });
3953 Ok(proto::ApplyCodeActionResponse {
3954 transaction: Some(project_transaction),
3955 })
3956 }
3957
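    // Generic handler for buffer-scoped LSP requests received over the wire: it
    // deserializes the request, dispatches it via `request_lsp`, and serializes the
    // response for the original sender along with the buffer version it was based on.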
3958 async fn handle_lsp_command<T: LspCommand>(
3959 this: ModelHandle<Self>,
3960 envelope: TypedEnvelope<T::ProtoRequest>,
3961 _: Arc<Client>,
3962 mut cx: AsyncAppContext,
3963 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3964 where
3965 <T::LspRequest as lsp::request::Request>::Result: Send,
3966 {
3967 let sender_id = envelope.original_sender_id()?;
3968 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3969 let buffer_handle = this.read_with(&cx, |this, _| {
3970 this.opened_buffers
3971 .get(&buffer_id)
3972 .and_then(|buffer| buffer.upgrade(&cx))
3973 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3974 })?;
3975 let request = T::from_proto(
3976 envelope.payload,
3977 this.clone(),
3978 buffer_handle.clone(),
3979 cx.clone(),
3980 )
3981 .await?;
3982 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3983 let response = this
3984 .update(&mut cx, |this, cx| {
3985 this.request_lsp(buffer_handle, request, cx)
3986 })
3987 .await?;
3988 this.update(&mut cx, |this, cx| {
3989 Ok(T::response_to_proto(
3990 response,
3991 this,
3992 sender_id,
3993 &buffer_version,
3994 cx,
3995 ))
3996 })
3997 }
3998
3999 async fn handle_get_project_symbols(
4000 this: ModelHandle<Self>,
4001 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4002 _: Arc<Client>,
4003 mut cx: AsyncAppContext,
4004 ) -> Result<proto::GetProjectSymbolsResponse> {
4005 let symbols = this
4006 .update(&mut cx, |this, cx| {
4007 this.symbols(&envelope.payload.query, cx)
4008 })
4009 .await?;
4010
4011 Ok(proto::GetProjectSymbolsResponse {
4012 symbols: symbols.iter().map(serialize_symbol).collect(),
4013 })
4014 }
4015
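    // Handles a SearchProject request by running the query locally and converting each
    // matching range into a protobuf Location, serializing buffer state for the peer as
    // needed.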
4016 async fn handle_search_project(
4017 this: ModelHandle<Self>,
4018 envelope: TypedEnvelope<proto::SearchProject>,
4019 _: Arc<Client>,
4020 mut cx: AsyncAppContext,
4021 ) -> Result<proto::SearchProjectResponse> {
4022 let peer_id = envelope.original_sender_id()?;
4023 let query = SearchQuery::from_proto(envelope.payload)?;
4024 let result = this
4025 .update(&mut cx, |this, cx| this.search(query, cx))
4026 .await?;
4027
4028 this.update(&mut cx, |this, cx| {
4029 let mut locations = Vec::new();
4030 for (buffer, ranges) in result {
4031 for range in ranges {
4032 let start = serialize_anchor(&range.start);
4033 let end = serialize_anchor(&range.end);
4034 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4035 locations.push(proto::Location {
4036 buffer: Some(buffer),
4037 start: Some(start),
4038 end: Some(end),
4039 });
4040 }
4041 }
4042 Ok(proto::SearchProjectResponse { locations })
4043 })
4044 }
4045
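    // Handles an OpenBufferForSymbol request. The symbol's signature is recomputed and
    // compared against the one in the message, so peers can only open buffers for
    // symbols this project previously handed out.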
4046 async fn handle_open_buffer_for_symbol(
4047 this: ModelHandle<Self>,
4048 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4049 _: Arc<Client>,
4050 mut cx: AsyncAppContext,
4051 ) -> Result<proto::OpenBufferForSymbolResponse> {
4052 let peer_id = envelope.original_sender_id()?;
4053 let symbol = envelope
4054 .payload
4055 .symbol
4056 .ok_or_else(|| anyhow!("invalid symbol"))?;
4057 let symbol = this.read_with(&cx, |this, _| {
4058 let symbol = this.deserialize_symbol(symbol)?;
4059 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4060 if signature == symbol.signature {
4061 Ok(symbol)
4062 } else {
4063 Err(anyhow!("invalid symbol signature"))
4064 }
4065 })?;
4066 let buffer = this
4067 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4068 .await?;
4069
4070 Ok(proto::OpenBufferForSymbolResponse {
4071 buffer: Some(this.update(&mut cx, |this, cx| {
4072 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4073 })),
4074 })
4075 }
4076
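    // Computes a SHA-256 signature over a symbol's worktree id, path, and this project's
    // private nonce; used above to validate symbols sent back by peers.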
4077 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4078 let mut hasher = Sha256::new();
4079 hasher.update(worktree_id.to_proto().to_be_bytes());
4080 hasher.update(path.to_string_lossy().as_bytes());
4081 hasher.update(self.nonce.to_be_bytes());
4082 hasher.finalize().as_slice().try_into().unwrap()
4083 }
4084
4085 async fn handle_open_buffer_by_id(
4086 this: ModelHandle<Self>,
4087 envelope: TypedEnvelope<proto::OpenBufferById>,
4088 _: Arc<Client>,
4089 mut cx: AsyncAppContext,
4090 ) -> Result<proto::OpenBufferResponse> {
4091 let peer_id = envelope.original_sender_id()?;
4092 let buffer = this
4093 .update(&mut cx, |this, cx| {
4094 this.open_buffer_by_id(envelope.payload.id, cx)
4095 })
4096 .await?;
4097 this.update(&mut cx, |this, cx| {
4098 Ok(proto::OpenBufferResponse {
4099 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4100 })
4101 })
4102 }
4103
4104 async fn handle_open_buffer_by_path(
4105 this: ModelHandle<Self>,
4106 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4107 _: Arc<Client>,
4108 mut cx: AsyncAppContext,
4109 ) -> Result<proto::OpenBufferResponse> {
4110 let peer_id = envelope.original_sender_id()?;
4111 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4112 let open_buffer = this.update(&mut cx, |this, cx| {
4113 this.open_buffer(
4114 ProjectPath {
4115 worktree_id,
4116 path: PathBuf::from(envelope.payload.path).into(),
4117 },
4118 cx,
4119 )
4120 });
4121
4122 let buffer = open_buffer.await?;
4123 this.update(&mut cx, |this, cx| {
4124 Ok(proto::OpenBufferResponse {
4125 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4126 })
4127 })
4128 }
4129
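    // Converts a ProjectTransaction into its protobuf form for a specific peer, sending
    // full buffer state for any buffer that peer hasn't seen yet.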
4130 fn serialize_project_transaction_for_peer(
4131 &mut self,
4132 project_transaction: ProjectTransaction,
4133 peer_id: PeerId,
4134 cx: &AppContext,
4135 ) -> proto::ProjectTransaction {
4136 let mut serialized_transaction = proto::ProjectTransaction {
4137 buffers: Default::default(),
4138 transactions: Default::default(),
4139 };
4140 for (buffer, transaction) in project_transaction.0 {
4141 serialized_transaction
4142 .buffers
4143 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4144 serialized_transaction
4145 .transactions
4146 .push(language::proto::serialize_transaction(&transaction));
4147 }
4148 serialized_transaction
4149 }
4150
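    // Reconstructs a ProjectTransaction received from a peer: resolves each buffer,
    // waits for the edits referenced by each transaction to arrive, and optionally
    // pushes the transactions onto the buffers' histories.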
4151 fn deserialize_project_transaction(
4152 &mut self,
4153 message: proto::ProjectTransaction,
4154 push_to_history: bool,
4155 cx: &mut ModelContext<Self>,
4156 ) -> Task<Result<ProjectTransaction>> {
4157 cx.spawn(|this, mut cx| async move {
4158 let mut project_transaction = ProjectTransaction::default();
4159 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4160 let buffer = this
4161 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4162 .await?;
4163 let transaction = language::proto::deserialize_transaction(transaction)?;
4164 project_transaction.0.insert(buffer, transaction);
4165 }
4166
4167 for (buffer, transaction) in &project_transaction.0 {
4168 buffer
4169 .update(&mut cx, |buffer, _| {
4170 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4171 })
4172 .await;
4173
4174 if push_to_history {
4175 buffer.update(&mut cx, |buffer, _| {
4176 buffer.push_transaction(transaction.clone(), Instant::now());
4177 });
4178 }
4179 }
4180
4181 Ok(project_transaction)
4182 })
4183 }
4184
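    // Serializes a buffer for a given peer, sending the complete buffer state the first
    // time and only the buffer id on subsequent sends.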
4185 fn serialize_buffer_for_peer(
4186 &mut self,
4187 buffer: &ModelHandle<Buffer>,
4188 peer_id: PeerId,
4189 cx: &AppContext,
4190 ) -> proto::Buffer {
4191 let buffer_id = buffer.read(cx).remote_id();
4192 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4193 if shared_buffers.insert(buffer_id) {
4194 proto::Buffer {
4195 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4196 }
4197 } else {
4198 proto::Buffer {
4199 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4200 }
4201 }
4202 }
4203
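    // Resolves a protobuf buffer into a local buffer handle. An id-only message waits
    // until the corresponding buffer has been opened locally, while a full state message
    // constructs and registers a new buffer.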
4204 fn deserialize_buffer(
4205 &mut self,
4206 buffer: proto::Buffer,
4207 cx: &mut ModelContext<Self>,
4208 ) -> Task<Result<ModelHandle<Buffer>>> {
4209 let replica_id = self.replica_id();
4210
4211 let opened_buffer_tx = self.opened_buffer.0.clone();
4212 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4213 cx.spawn(|this, mut cx| async move {
4214 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4215 proto::buffer::Variant::Id(id) => {
4216 let buffer = loop {
4217 let buffer = this.read_with(&cx, |this, cx| {
4218 this.opened_buffers
4219 .get(&id)
4220 .and_then(|buffer| buffer.upgrade(cx))
4221 });
4222 if let Some(buffer) = buffer {
4223 break buffer;
4224 }
4225 opened_buffer_rx
4226 .next()
4227 .await
4228 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4229 };
4230 Ok(buffer)
4231 }
4232 proto::buffer::Variant::State(mut buffer) => {
4233 let mut buffer_worktree = None;
4234 let mut buffer_file = None;
4235 if let Some(file) = buffer.file.take() {
4236 this.read_with(&cx, |this, cx| {
4237 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4238 let worktree =
4239 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4240 anyhow!("no worktree found for id {}", file.worktree_id)
4241 })?;
4242 buffer_file =
4243 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4244 as Box<dyn language::File>);
4245 buffer_worktree = Some(worktree);
4246 Ok::<_, anyhow::Error>(())
4247 })?;
4248 }
4249
4250 let buffer = cx.add_model(|cx| {
4251 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4252 });
4253
4254 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4255
4256 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4257 Ok(buffer)
4258 }
4259 }
4260 })
4261 }
4262
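    // Converts a protobuf symbol back into a `Symbol`, selecting a language from the
    // path to build its label. Note that the symbol kind is transmuted directly from the
    // wire value, so it is trusted to be valid.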
4263 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4264 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4265 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4266 let start = serialized_symbol
4267 .start
4268 .ok_or_else(|| anyhow!("invalid start"))?;
4269 let end = serialized_symbol
4270 .end
4271 .ok_or_else(|| anyhow!("invalid end"))?;
4272 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4273 let path = PathBuf::from(serialized_symbol.path);
4274 let language = self.languages.select_language(&path);
4275 Ok(Symbol {
4276 source_worktree_id,
4277 worktree_id,
4278 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4279 label: language
4280 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4281 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4282 name: serialized_symbol.name,
4283 path,
4284 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4285 kind,
4286 signature: serialized_symbol
4287 .signature
4288 .try_into()
4289 .map_err(|_| anyhow!("invalid signature"))?,
4290 })
4291 }
4292
4293 async fn handle_buffer_saved(
4294 this: ModelHandle<Self>,
4295 envelope: TypedEnvelope<proto::BufferSaved>,
4296 _: Arc<Client>,
4297 mut cx: AsyncAppContext,
4298 ) -> Result<()> {
4299 let version = deserialize_version(envelope.payload.version);
4300 let mtime = envelope
4301 .payload
4302 .mtime
4303 .ok_or_else(|| anyhow!("missing mtime"))?
4304 .into();
4305
4306 this.update(&mut cx, |this, cx| {
4307 let buffer = this
4308 .opened_buffers
4309 .get(&envelope.payload.buffer_id)
4310 .and_then(|buffer| buffer.upgrade(cx));
4311 if let Some(buffer) = buffer {
4312 buffer.update(cx, |buffer, cx| {
4313 buffer.did_save(version, mtime, None, cx);
4314 });
4315 }
4316 Ok(())
4317 })
4318 }
4319
4320 async fn handle_buffer_reloaded(
4321 this: ModelHandle<Self>,
4322 envelope: TypedEnvelope<proto::BufferReloaded>,
4323 _: Arc<Client>,
4324 mut cx: AsyncAppContext,
4325 ) -> Result<()> {
4326 let payload = envelope.payload.clone();
4327 let version = deserialize_version(payload.version);
4328 let mtime = payload
4329 .mtime
4330 .ok_or_else(|| anyhow!("missing mtime"))?
4331 .into();
4332 this.update(&mut cx, |this, cx| {
4333 let buffer = this
4334 .opened_buffers
4335 .get(&payload.buffer_id)
4336 .and_then(|buffer| buffer.upgrade(cx));
4337 if let Some(buffer) = buffer {
4338 buffer.update(cx, |buffer, cx| {
4339 buffer.did_reload(version, mtime, cx);
4340 });
4341 }
4342 Ok(())
4343 })
4344 }
4345
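    // Fuzzy-matches the query against paths in all visible worktrees on the background
    // executor; worktree root names are included in the candidates when more than one
    // worktree is open.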
4346 pub fn match_paths<'a>(
4347 &self,
4348 query: &'a str,
4349 include_ignored: bool,
4350 smart_case: bool,
4351 max_results: usize,
4352 cancel_flag: &'a AtomicBool,
4353 cx: &AppContext,
4354 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4355 let worktrees = self
4356 .worktrees(cx)
4357 .filter(|worktree| worktree.read(cx).is_visible())
4358 .collect::<Vec<_>>();
4359 let include_root_name = worktrees.len() > 1;
4360 let candidate_sets = worktrees
4361 .into_iter()
4362 .map(|worktree| CandidateSet {
4363 snapshot: worktree.read(cx).snapshot(),
4364 include_ignored,
4365 include_root_name,
4366 })
4367 .collect::<Vec<_>>();
4368
4369 let background = cx.background().clone();
4370 async move {
4371 fuzzy::match_paths(
4372 candidate_sets.as_slice(),
4373 query,
4374 smart_case,
4375 max_results,
4376 cancel_flag,
4377 background,
4378 )
4379 .await
4380 }
4381 }
4382
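    // Converts LSP text edits into anchored buffer edits against the snapshot the
    // language server was working with, diffing multi-line replacements so that anchors
    // in unchanged regions are preserved.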
4383 fn edits_from_lsp(
4384 &mut self,
4385 buffer: &ModelHandle<Buffer>,
4386 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4387 version: Option<i32>,
4388 cx: &mut ModelContext<Self>,
4389 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4390 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4391 cx.background().spawn(async move {
4392 let snapshot = snapshot?;
4393 let mut lsp_edits = lsp_edits
4394 .into_iter()
4395 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4396 .peekable();
4397
4398 let mut edits = Vec::new();
4399 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4400 // Combine any LSP edits that are adjacent.
4401 //
4402 // Also, combine LSP edits that are separated from each other by only
4403 // a newline. This is important because for some code actions,
4404 // Rust-analyzer rewrites the entire buffer via a series of edits that
4405 // are separated by unchanged newline characters.
4406 //
4407 // In order for the diffing logic below to work properly, any edits that
4408 // cancel each other out must be combined into one.
4409 while let Some((next_range, next_text)) = lsp_edits.peek() {
4410 if next_range.start > range.end {
4411 if next_range.start.row > range.end.row + 1
4412 || next_range.start.column > 0
4413 || snapshot.clip_point_utf16(
4414 PointUtf16::new(range.end.row, u32::MAX),
4415 Bias::Left,
4416 ) > range.end
4417 {
4418 break;
4419 }
4420 new_text.push('\n');
4421 }
4422 range.end = next_range.end;
4423 new_text.push_str(&next_text);
4424 lsp_edits.next();
4425 }
4426
4427 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4428 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4429 {
4430 return Err(anyhow!("invalid edits received from language server"));
4431 }
4432
4433 // For multiline edits, perform a diff of the old and new text so that
4434 // we can identify the changes more precisely, preserving the locations
4435 // of any anchors positioned in the unchanged regions.
4436 if range.end.row > range.start.row {
4437 let mut offset = range.start.to_offset(&snapshot);
4438 let old_text = snapshot.text_for_range(range).collect::<String>();
4439
4440 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4441 let mut moved_since_edit = true;
4442 for change in diff.iter_all_changes() {
4443 let tag = change.tag();
4444 let value = change.value();
4445 match tag {
4446 ChangeTag::Equal => {
4447 offset += value.len();
4448 moved_since_edit = true;
4449 }
4450 ChangeTag::Delete => {
4451 let start = snapshot.anchor_after(offset);
4452 let end = snapshot.anchor_before(offset + value.len());
4453 if moved_since_edit {
4454 edits.push((start..end, String::new()));
4455 } else {
4456 edits.last_mut().unwrap().0.end = end;
4457 }
4458 offset += value.len();
4459 moved_since_edit = false;
4460 }
4461 ChangeTag::Insert => {
4462 if moved_since_edit {
4463 let anchor = snapshot.anchor_after(offset);
4464 edits.push((anchor.clone()..anchor, value.to_string()));
4465 } else {
4466 edits.last_mut().unwrap().1.push_str(value);
4467 }
4468 moved_since_edit = false;
4469 }
4470 }
4471 }
4472 } else if range.end == range.start {
4473 let anchor = snapshot.anchor_after(range.start);
4474 edits.push((anchor.clone()..anchor, new_text));
4475 } else {
4476 let edit_start = snapshot.anchor_after(range.start);
4477 let edit_end = snapshot.anchor_before(range.end);
4478 edits.push((edit_start..edit_end, new_text));
4479 }
4480 }
4481
4482 Ok(edits)
4483 })
4484 }
4485
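    // Returns the buffer snapshot matching the version the language server referred to,
    // or the current snapshot if no version was given. Snapshots more than
    // OLD_VERSIONS_TO_RETAIN versions older than the requested one are pruned.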
4486 fn buffer_snapshot_for_lsp_version(
4487 &mut self,
4488 buffer: &ModelHandle<Buffer>,
4489 version: Option<i32>,
4490 cx: &AppContext,
4491 ) -> Result<TextBufferSnapshot> {
4492 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4493
4494 if let Some(version) = version {
4495 let buffer_id = buffer.read(cx).remote_id();
4496 let snapshots = self
4497 .buffer_snapshots
4498 .get_mut(&buffer_id)
4499 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4500 let mut found_snapshot = None;
4501 snapshots.retain(|(snapshot_version, snapshot)| {
4502 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4503 false
4504 } else {
4505 if *snapshot_version == version {
4506 found_snapshot = Some(snapshot.clone());
4507 }
4508 true
4509 }
4510 });
4511
4512 found_snapshot.ok_or_else(|| {
4513 anyhow!(
4514 "snapshot not found for buffer {} at version {}",
4515 buffer_id,
4516 version
4517 )
4518 })
4519 } else {
            Ok(buffer.read(cx).text_snapshot())
4521 }
4522 }
4523
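    // Looks up the language server (and its adapter) running for this buffer's language
    // in the buffer's worktree, if any.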
4524 fn language_server_for_buffer(
4525 &self,
4526 buffer: &Buffer,
4527 cx: &AppContext,
4528 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4529 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4530 let worktree_id = file.worktree_id(cx);
4531 self.language_servers
4532 .get(&(worktree_id, language.lsp_adapter()?.name()))
4533 } else {
4534 None
4535 }
4536 }
4537}
4538
4539impl WorktreeHandle {
4540 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4541 match self {
4542 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4543 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4544 }
4545 }
4546}
4547
4548impl OpenBuffer {
4549 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4550 match self {
4551 OpenBuffer::Strong(handle) => Some(handle.clone()),
4552 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4553 OpenBuffer::Loading(_) => None,
4554 }
4555 }
4556}
4557
4558struct CandidateSet {
4559 snapshot: Snapshot,
4560 include_ignored: bool,
4561 include_root_name: bool,
4562}
4563
4564impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4565 type Candidates = CandidateSetIter<'a>;
4566
4567 fn id(&self) -> usize {
4568 self.snapshot.id().to_usize()
4569 }
4570
4571 fn len(&self) -> usize {
4572 if self.include_ignored {
4573 self.snapshot.file_count()
4574 } else {
4575 self.snapshot.visible_file_count()
4576 }
4577 }
4578
4579 fn prefix(&self) -> Arc<str> {
4580 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4581 self.snapshot.root_name().into()
4582 } else if self.include_root_name {
4583 format!("{}/", self.snapshot.root_name()).into()
4584 } else {
4585 "".into()
4586 }
4587 }
4588
4589 fn candidates(&'a self, start: usize) -> Self::Candidates {
4590 CandidateSetIter {
4591 traversal: self.snapshot.files(self.include_ignored, start),
4592 }
4593 }
4594}
4595
4596struct CandidateSetIter<'a> {
4597 traversal: Traversal<'a>,
4598}
4599
4600impl<'a> Iterator for CandidateSetIter<'a> {
4601 type Item = PathMatchCandidate<'a>;
4602
4603 fn next(&mut self) -> Option<Self::Item> {
4604 self.traversal.next().map(|entry| {
4605 if let EntryKind::File(char_bag) = entry.kind {
4606 PathMatchCandidate {
4607 path: &entry.path,
4608 char_bag,
4609 }
4610 } else {
4611 unreachable!()
4612 }
4613 })
4614 }
4615}
4616
4617impl Entity for Project {
4618 type Event = Event;
4619
4620 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4621 match &self.client_state {
4622 ProjectClientState::Local { remote_id_rx, .. } => {
4623 if let Some(project_id) = *remote_id_rx.borrow() {
4624 self.client
4625 .send(proto::UnregisterProject { project_id })
4626 .log_err();
4627 }
4628 }
4629 ProjectClientState::Remote { remote_id, .. } => {
4630 self.client
4631 .send(proto::LeaveProject {
4632 project_id: *remote_id,
4633 })
4634 .log_err();
4635 }
4636 }
4637 }
4638
4639 fn app_will_quit(
4640 &mut self,
4641 _: &mut MutableAppContext,
4642 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4643 let shutdown_futures = self
4644 .language_servers
4645 .drain()
4646 .filter_map(|(_, (_, server))| server.shutdown())
4647 .collect::<Vec<_>>();
4648 Some(
4649 async move {
4650 futures::future::join_all(shutdown_futures).await;
4651 }
4652 .boxed(),
4653 )
4654 }
4655}
4656
4657impl Collaborator {
4658 fn from_proto(
4659 message: proto::Collaborator,
4660 user_store: &ModelHandle<UserStore>,
4661 cx: &mut AsyncAppContext,
4662 ) -> impl Future<Output = Result<Self>> {
4663 let user = user_store.update(cx, |user_store, cx| {
4664 user_store.fetch_user(message.user_id, cx)
4665 });
4666
4667 async move {
4668 Ok(Self {
4669 peer_id: PeerId(message.peer_id),
4670 user: user.await?,
4671 replica_id: message.replica_id as ReplicaId,
4672 })
4673 }
4674 }
4675}
4676
4677impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4678 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4679 Self {
4680 worktree_id,
4681 path: path.as_ref().into(),
4682 }
4683 }
4684}
4685
4686impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4687 fn from(options: lsp::CreateFileOptions) -> Self {
4688 Self {
4689 overwrite: options.overwrite.unwrap_or(false),
4690 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4691 }
4692 }
4693}
4694
4695impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4696 fn from(options: lsp::RenameFileOptions) -> Self {
4697 Self {
4698 overwrite: options.overwrite.unwrap_or(false),
4699 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4700 }
4701 }
4702}
4703
4704impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4705 fn from(options: lsp::DeleteFileOptions) -> Self {
4706 Self {
4707 recursive: options.recursive.unwrap_or(false),
4708 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4709 }
4710 }
4711}
4712
4713fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4714 proto::Symbol {
4715 source_worktree_id: symbol.source_worktree_id.to_proto(),
4716 worktree_id: symbol.worktree_id.to_proto(),
4717 language_server_name: symbol.language_server_name.0.to_string(),
4718 name: symbol.name.clone(),
4719 kind: unsafe { mem::transmute(symbol.kind) },
4720 path: symbol.path.to_string_lossy().to_string(),
4721 start: Some(proto::Point {
4722 row: symbol.range.start.row,
4723 column: symbol.range.start.column,
4724 }),
4725 end: Some(proto::Point {
4726 row: symbol.range.end.row,
4727 column: symbol.range.end.column,
4728 }),
4729 signature: symbol.signature.to_vec(),
4730 }
4731}
4732
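// Computes `path` relative to `base`, emitting `..` components where the two paths
// diverge.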
4733fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4734 let mut path_components = path.components();
4735 let mut base_components = base.components();
4736 let mut components: Vec<Component> = Vec::new();
4737 loop {
4738 match (path_components.next(), base_components.next()) {
4739 (None, None) => break,
4740 (Some(a), None) => {
4741 components.push(a);
4742 components.extend(path_components.by_ref());
4743 break;
4744 }
4745 (None, _) => components.push(Component::ParentDir),
4746 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4747 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4748 (Some(a), Some(_)) => {
4749 components.push(Component::ParentDir);
4750 for _ in base_components {
4751 components.push(Component::ParentDir);
4752 }
4753 components.push(a);
4754 components.extend(path_components.by_ref());
4755 break;
4756 }
4757 }
4758 }
4759 components.iter().map(|c| c.as_os_str()).collect()
4760}
4761
4762impl Item for Buffer {
4763 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4764 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4765 }
4766}
4767
4768#[cfg(test)]
4769mod tests {
4770 use super::{Event, *};
4771 use fs::RealFs;
4772 use futures::{future, StreamExt};
4773 use gpui::test::subscribe;
4774 use language::{
4775 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4776 ToPoint,
4777 };
4778 use lsp::Url;
4779 use serde_json::json;
4780 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4781 use unindent::Unindent as _;
4782 use util::{assert_set_eq, test::temp_tree};
4783 use worktree::WorktreeHandle as _;
4784
4785 #[gpui::test]
4786 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4787 let dir = temp_tree(json!({
4788 "root": {
4789 "apple": "",
4790 "banana": {
4791 "carrot": {
4792 "date": "",
4793 "endive": "",
4794 }
4795 },
4796 "fennel": {
4797 "grape": "",
4798 }
4799 }
4800 }));
4801
4802 let root_link_path = dir.path().join("root_link");
4803 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4804 unix::fs::symlink(
4805 &dir.path().join("root/fennel"),
4806 &dir.path().join("root/finnochio"),
4807 )
4808 .unwrap();
4809
4810 let project = Project::test(Arc::new(RealFs), cx);
4811
4812 let (tree, _) = project
4813 .update(cx, |project, cx| {
4814 project.find_or_create_local_worktree(&root_link_path, true, cx)
4815 })
4816 .await
4817 .unwrap();
4818
4819 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4820 .await;
4821 cx.read(|cx| {
4822 let tree = tree.read(cx);
4823 assert_eq!(tree.file_count(), 5);
4824 assert_eq!(
4825 tree.inode_for_path("fennel/grape"),
4826 tree.inode_for_path("finnochio/grape")
4827 );
4828 });
4829
4830 let cancel_flag = Default::default();
4831 let results = project
4832 .read_with(cx, |project, cx| {
4833 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4834 })
4835 .await;
4836 assert_eq!(
4837 results
4838 .into_iter()
4839 .map(|result| result.path)
4840 .collect::<Vec<Arc<Path>>>(),
4841 vec![
4842 PathBuf::from("banana/carrot/date").into(),
4843 PathBuf::from("banana/carrot/endive").into(),
4844 ]
4845 );
4846 }
4847
4848 #[gpui::test]
4849 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4850 cx.foreground().forbid_parking();
4851
4852 let mut rust_language = Language::new(
4853 LanguageConfig {
4854 name: "Rust".into(),
4855 path_suffixes: vec!["rs".to_string()],
4856 ..Default::default()
4857 },
4858 Some(tree_sitter_rust::language()),
4859 );
4860 let mut json_language = Language::new(
4861 LanguageConfig {
4862 name: "JSON".into(),
4863 path_suffixes: vec!["json".to_string()],
4864 ..Default::default()
4865 },
4866 None,
4867 );
4868 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4869 name: "the-rust-language-server",
4870 capabilities: lsp::ServerCapabilities {
4871 completion_provider: Some(lsp::CompletionOptions {
4872 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4873 ..Default::default()
4874 }),
4875 ..Default::default()
4876 },
4877 ..Default::default()
4878 });
4879 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4880 name: "the-json-language-server",
4881 capabilities: lsp::ServerCapabilities {
4882 completion_provider: Some(lsp::CompletionOptions {
4883 trigger_characters: Some(vec![":".to_string()]),
4884 ..Default::default()
4885 }),
4886 ..Default::default()
4887 },
4888 ..Default::default()
4889 });
4890
4891 let fs = FakeFs::new(cx.background());
4892 fs.insert_tree(
4893 "/the-root",
4894 json!({
4895 "test.rs": "const A: i32 = 1;",
4896 "test2.rs": "",
4897 "Cargo.toml": "a = 1",
4898 "package.json": "{\"a\": 1}",
4899 }),
4900 )
4901 .await;
4902
4903 let project = Project::test(fs, cx);
4904 project.update(cx, |project, _| {
4905 project.languages.add(Arc::new(rust_language));
4906 project.languages.add(Arc::new(json_language));
4907 });
4908
4909 let worktree_id = project
4910 .update(cx, |project, cx| {
4911 project.find_or_create_local_worktree("/the-root", true, cx)
4912 })
4913 .await
4914 .unwrap()
4915 .0
4916 .read_with(cx, |tree, _| tree.id());
4917
4918 // Open a buffer without an associated language server.
4919 let toml_buffer = project
4920 .update(cx, |project, cx| {
4921 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4922 })
4923 .await
4924 .unwrap();
4925
4926 // Open a buffer with an associated language server.
4927 let rust_buffer = project
4928 .update(cx, |project, cx| {
4929 project.open_buffer((worktree_id, "test.rs"), cx)
4930 })
4931 .await
4932 .unwrap();
4933
4934 // A server is started up, and it is notified about Rust files.
4935 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4936 assert_eq!(
4937 fake_rust_server
4938 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4939 .await
4940 .text_document,
4941 lsp::TextDocumentItem {
4942 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4943 version: 0,
4944 text: "const A: i32 = 1;".to_string(),
4945 language_id: Default::default()
4946 }
4947 );
4948
4949 // The buffer is configured based on the language server's capabilities.
4950 rust_buffer.read_with(cx, |buffer, _| {
4951 assert_eq!(
4952 buffer.completion_triggers(),
4953 &[".".to_string(), "::".to_string()]
4954 );
4955 });
4956 toml_buffer.read_with(cx, |buffer, _| {
4957 assert!(buffer.completion_triggers().is_empty());
4958 });
4959
4960 // Edit a buffer. The changes are reported to the language server.
4961 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4962 assert_eq!(
4963 fake_rust_server
4964 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4965 .await
4966 .text_document,
4967 lsp::VersionedTextDocumentIdentifier::new(
4968 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4969 1
4970 )
4971 );
4972
4973 // Open a third buffer with a different associated language server.
4974 let json_buffer = project
4975 .update(cx, |project, cx| {
4976 project.open_buffer((worktree_id, "package.json"), cx)
4977 })
4978 .await
4979 .unwrap();
4980
        // A JSON language server is started and is notified only about the JSON buffer.
4982 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4983 assert_eq!(
4984 fake_json_server
4985 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4986 .await
4987 .text_document,
4988 lsp::TextDocumentItem {
4989 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4990 version: 0,
4991 text: "{\"a\": 1}".to_string(),
4992 language_id: Default::default()
4993 }
4994 );
4995
4996 // This buffer is configured based on the second language server's
4997 // capabilities.
4998 json_buffer.read_with(cx, |buffer, _| {
4999 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5000 });
5001
5002 // When opening another buffer whose language server is already running,
5003 // it is also configured based on the existing language server's capabilities.
5004 let rust_buffer2 = project
5005 .update(cx, |project, cx| {
5006 project.open_buffer((worktree_id, "test2.rs"), cx)
5007 })
5008 .await
5009 .unwrap();
5010 rust_buffer2.read_with(cx, |buffer, _| {
5011 assert_eq!(
5012 buffer.completion_triggers(),
5013 &[".".to_string(), "::".to_string()]
5014 );
5015 });
5016
5017 // Changes are reported only to servers matching the buffer's language.
5018 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5019 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5020 assert_eq!(
5021 fake_rust_server
5022 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5023 .await
5024 .text_document,
5025 lsp::VersionedTextDocumentIdentifier::new(
5026 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5027 1
5028 )
5029 );
5030
5031 // Save notifications are reported to all servers.
5032 toml_buffer
5033 .update(cx, |buffer, cx| buffer.save(cx))
5034 .await
5035 .unwrap();
5036 assert_eq!(
5037 fake_rust_server
5038 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5039 .await
5040 .text_document,
5041 lsp::TextDocumentIdentifier::new(
5042 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5043 )
5044 );
5045 assert_eq!(
5046 fake_json_server
5047 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5048 .await
5049 .text_document,
5050 lsp::TextDocumentIdentifier::new(
5051 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5052 )
5053 );
5054
5055 // Restart language servers
5056 project.update(cx, |project, cx| {
5057 project.restart_language_servers_for_buffers(
5058 vec![rust_buffer.clone(), json_buffer.clone()],
5059 cx,
5060 );
5061 });
5062
5063 let mut rust_shutdown_requests = fake_rust_server
5064 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5065 let mut json_shutdown_requests = fake_json_server
5066 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5067 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5068
5069 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5070 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5071
        // Ensure both Rust documents are reopened in the new Rust language server, regardless of order.
5073 assert_set_eq!(
5074 [
5075 fake_rust_server
5076 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5077 .await
5078 .text_document,
5079 fake_rust_server
5080 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5081 .await
5082 .text_document,
5083 ],
5084 [
5085 lsp::TextDocumentItem {
5086 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5087 version: 1,
5088 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5089 language_id: Default::default()
5090 },
5091 lsp::TextDocumentItem {
5092 uri: lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5093 version: 1,
5094 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5095 language_id: Default::default()
5096 },
5097 ]
5098 );
5099
        // Ensure the JSON document is reopened in the new JSON language server.
5101 assert_eq!(
5102 fake_json_server
5103 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5104 .await
5105 .text_document,
5106 lsp::TextDocumentItem {
5107 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5108 version: 0,
5109 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5110 language_id: Default::default()
5111 }
5112 );
5113
5114 // Close notifications are reported only to servers matching the buffer's language.
5115 cx.update(|_| drop(json_buffer));
5116 let close_message = lsp::DidCloseTextDocumentParams {
5117 text_document: lsp::TextDocumentIdentifier::new(
5118 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5119 ),
5120 };
5121 assert_eq!(
5122 fake_json_server
5123 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5124 .await,
5125 close_message,
5126 );
5127 }
5128
5129 #[gpui::test]
5130 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5131 cx.foreground().forbid_parking();
5132
5133 let progress_token = "the-progress-token";
5134 let mut language = Language::new(
5135 LanguageConfig {
5136 name: "Rust".into(),
5137 path_suffixes: vec!["rs".to_string()],
5138 ..Default::default()
5139 },
5140 Some(tree_sitter_rust::language()),
5141 );
5142 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5143 disk_based_diagnostics_progress_token: Some(progress_token),
5144 disk_based_diagnostics_sources: &["disk"],
5145 ..Default::default()
5146 });
5147
5148 let fs = FakeFs::new(cx.background());
5149 fs.insert_tree(
5150 "/dir",
5151 json!({
5152 "a.rs": "fn a() { A }",
5153 "b.rs": "const y: i32 = 1",
5154 }),
5155 )
5156 .await;
5157
5158 let project = Project::test(fs, cx);
5159 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5160
5161 let (tree, _) = project
5162 .update(cx, |project, cx| {
5163 project.find_or_create_local_worktree("/dir", true, cx)
5164 })
5165 .await
5166 .unwrap();
5167 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5168
5169 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5170 .await;
5171
5172 // Cause worktree to start the fake language server
5173 let _buffer = project
5174 .update(cx, |project, cx| {
5175 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5176 })
5177 .await
5178 .unwrap();
5179
5180 let mut events = subscribe(&project, cx);
5181
5182 let mut fake_server = fake_servers.next().await.unwrap();
5183 fake_server.start_progress(progress_token).await;
5184 assert_eq!(
5185 events.next().await.unwrap(),
5186 Event::DiskBasedDiagnosticsStarted
5187 );
5188
5189 fake_server.start_progress(progress_token).await;
5190 fake_server.end_progress(progress_token).await;
5191 fake_server.start_progress(progress_token).await;
5192
5193 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5194 lsp::PublishDiagnosticsParams {
5195 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5196 version: None,
5197 diagnostics: vec![lsp::Diagnostic {
5198 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5199 severity: Some(lsp::DiagnosticSeverity::ERROR),
5200 message: "undefined variable 'A'".to_string(),
5201 ..Default::default()
5202 }],
5203 },
5204 );
5205 assert_eq!(
5206 events.next().await.unwrap(),
5207 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5208 );
5209
5210 fake_server.end_progress(progress_token).await;
5211 fake_server.end_progress(progress_token).await;
5212 assert_eq!(
5213 events.next().await.unwrap(),
5214 Event::DiskBasedDiagnosticsUpdated
5215 );
5216 assert_eq!(
5217 events.next().await.unwrap(),
5218 Event::DiskBasedDiagnosticsFinished
5219 );
5220
5221 let buffer = project
5222 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5223 .await
5224 .unwrap();
5225
5226 buffer.read_with(cx, |buffer, _| {
5227 let snapshot = buffer.snapshot();
5228 let diagnostics = snapshot
5229 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5230 .collect::<Vec<_>>();
5231 assert_eq!(
5232 diagnostics,
5233 &[DiagnosticEntry {
5234 range: Point::new(0, 9)..Point::new(0, 10),
5235 diagnostic: Diagnostic {
5236 severity: lsp::DiagnosticSeverity::ERROR,
5237 message: "undefined variable 'A'".to_string(),
5238 group_id: 0,
5239 is_primary: true,
5240 ..Default::default()
5241 }
5242 }]
5243 )
5244 });
5245 }
5246
5247 #[gpui::test]
5248 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5249 cx.foreground().forbid_parking();
5250
5251 let mut language = Language::new(
5252 LanguageConfig {
5253 name: "Rust".into(),
5254 path_suffixes: vec!["rs".to_string()],
5255 ..Default::default()
5256 },
5257 Some(tree_sitter_rust::language()),
5258 );
5259 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5260 disk_based_diagnostics_sources: &["disk"],
5261 ..Default::default()
5262 });
5263
5264 let text = "
5265 fn a() { A }
5266 fn b() { BB }
5267 fn c() { CCC }
5268 "
5269 .unindent();
5270
5271 let fs = FakeFs::new(cx.background());
5272 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5273
5274 let project = Project::test(fs, cx);
5275 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5276
5277 let worktree_id = project
5278 .update(cx, |project, cx| {
5279 project.find_or_create_local_worktree("/dir", true, cx)
5280 })
5281 .await
5282 .unwrap()
5283 .0
5284 .read_with(cx, |tree, _| tree.id());
5285
5286 let buffer = project
5287 .update(cx, |project, cx| {
5288 project.open_buffer((worktree_id, "a.rs"), cx)
5289 })
5290 .await
5291 .unwrap();
5292
5293 let mut fake_server = fake_servers.next().await.unwrap();
5294 let open_notification = fake_server
5295 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5296 .await;
5297
5298 // Edit the buffer, moving the content down
5299 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5300 let change_notification_1 = fake_server
5301 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5302 .await;
5303 assert!(
5304 change_notification_1.text_document.version > open_notification.text_document.version
5305 );
5306
5307 // Report some diagnostics for the initial version of the buffer
5308 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5309 lsp::PublishDiagnosticsParams {
5310 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5311 version: Some(open_notification.text_document.version),
5312 diagnostics: vec![
5313 lsp::Diagnostic {
5314 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5315 severity: Some(DiagnosticSeverity::ERROR),
5316 message: "undefined variable 'A'".to_string(),
5317 source: Some("disk".to_string()),
5318 ..Default::default()
5319 },
5320 lsp::Diagnostic {
5321 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5322 severity: Some(DiagnosticSeverity::ERROR),
5323 message: "undefined variable 'BB'".to_string(),
5324 source: Some("disk".to_string()),
5325 ..Default::default()
5326 },
5327 lsp::Diagnostic {
5328 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5329 severity: Some(DiagnosticSeverity::ERROR),
5330 source: Some("disk".to_string()),
5331 message: "undefined variable 'CCC'".to_string(),
5332 ..Default::default()
5333 },
5334 ],
5335 },
5336 );
5337
5338 // The diagnostics have moved down since they were created.
5339 buffer.next_notification(cx).await;
5340 buffer.read_with(cx, |buffer, _| {
5341 assert_eq!(
5342 buffer
5343 .snapshot()
5344 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5345 .collect::<Vec<_>>(),
5346 &[
5347 DiagnosticEntry {
5348 range: Point::new(3, 9)..Point::new(3, 11),
5349 diagnostic: Diagnostic {
5350 severity: DiagnosticSeverity::ERROR,
5351 message: "undefined variable 'BB'".to_string(),
5352 is_disk_based: true,
5353 group_id: 1,
5354 is_primary: true,
5355 ..Default::default()
5356 },
5357 },
5358 DiagnosticEntry {
5359 range: Point::new(4, 9)..Point::new(4, 12),
5360 diagnostic: Diagnostic {
5361 severity: DiagnosticSeverity::ERROR,
5362 message: "undefined variable 'CCC'".to_string(),
5363 is_disk_based: true,
5364 group_id: 2,
5365 is_primary: true,
5366 ..Default::default()
5367 }
5368 }
5369 ]
5370 );
5371 assert_eq!(
5372 chunks_with_diagnostics(buffer, 0..buffer.len()),
5373 [
5374 ("\n\nfn a() { ".to_string(), None),
5375 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5376 (" }\nfn b() { ".to_string(), None),
5377 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5378 (" }\nfn c() { ".to_string(), None),
5379 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5380 (" }\n".to_string(), None),
5381 ]
5382 );
5383 assert_eq!(
5384 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5385 [
5386 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5387 (" }\nfn c() { ".to_string(), None),
5388 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5389 ]
5390 );
5391 });
5392
5393 // Ensure overlapping diagnostics are highlighted correctly.
5394 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5395 lsp::PublishDiagnosticsParams {
5396 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5397 version: Some(open_notification.text_document.version),
5398 diagnostics: vec![
5399 lsp::Diagnostic {
5400 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5401 severity: Some(DiagnosticSeverity::ERROR),
5402 message: "undefined variable 'A'".to_string(),
5403 source: Some("disk".to_string()),
5404 ..Default::default()
5405 },
5406 lsp::Diagnostic {
5407 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5408 severity: Some(DiagnosticSeverity::WARNING),
5409 message: "unreachable statement".to_string(),
5410 source: Some("disk".to_string()),
5411 ..Default::default()
5412 },
5413 ],
5414 },
5415 );
5416
5417 buffer.next_notification(cx).await;
5418 buffer.read_with(cx, |buffer, _| {
5419 assert_eq!(
5420 buffer
5421 .snapshot()
5422 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5423 .collect::<Vec<_>>(),
5424 &[
5425 DiagnosticEntry {
5426 range: Point::new(2, 9)..Point::new(2, 12),
5427 diagnostic: Diagnostic {
5428 severity: DiagnosticSeverity::WARNING,
5429 message: "unreachable statement".to_string(),
5430 is_disk_based: true,
5431 group_id: 1,
5432 is_primary: true,
5433 ..Default::default()
5434 }
5435 },
5436 DiagnosticEntry {
5437 range: Point::new(2, 9)..Point::new(2, 10),
5438 diagnostic: Diagnostic {
5439 severity: DiagnosticSeverity::ERROR,
5440 message: "undefined variable 'A'".to_string(),
5441 is_disk_based: true,
5442 group_id: 0,
5443 is_primary: true,
5444 ..Default::default()
5445 },
5446 }
5447 ]
5448 );
5449 assert_eq!(
5450 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5451 [
5452 ("fn a() { ".to_string(), None),
5453 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5454 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5455 ("\n".to_string(), None),
5456 ]
5457 );
5458 assert_eq!(
5459 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5460 [
5461 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5462 ("\n".to_string(), None),
5463 ]
5464 );
5465 });
5466
5467 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5468 // changes since the last save.
5469 buffer.update(cx, |buffer, cx| {
5470 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5471 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5472 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5473 });
5474 let change_notification_2 = fake_server
5475 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5476 .await;
5477 assert!(
5478 change_notification_2.text_document.version
5479 > change_notification_1.text_document.version
5480 );
5481
5482 // Handle out-of-order diagnostics
5483 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5484 lsp::PublishDiagnosticsParams {
5485 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5486 version: Some(change_notification_2.text_document.version),
5487 diagnostics: vec![
5488 lsp::Diagnostic {
5489 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5490 severity: Some(DiagnosticSeverity::ERROR),
5491 message: "undefined variable 'BB'".to_string(),
5492 source: Some("disk".to_string()),
5493 ..Default::default()
5494 },
5495 lsp::Diagnostic {
5496 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5497 severity: Some(DiagnosticSeverity::WARNING),
5498 message: "undefined variable 'A'".to_string(),
5499 source: Some("disk".to_string()),
5500 ..Default::default()
5501 },
5502 ],
5503 },
5504 );
5505
5506 buffer.next_notification(cx).await;
5507 buffer.read_with(cx, |buffer, _| {
5508 assert_eq!(
5509 buffer
5510 .snapshot()
5511 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5512 .collect::<Vec<_>>(),
5513 &[
5514 DiagnosticEntry {
5515 range: Point::new(2, 21)..Point::new(2, 22),
5516 diagnostic: Diagnostic {
5517 severity: DiagnosticSeverity::WARNING,
5518 message: "undefined variable 'A'".to_string(),
5519 is_disk_based: true,
5520 group_id: 1,
5521 is_primary: true,
5522 ..Default::default()
5523 }
5524 },
5525 DiagnosticEntry {
5526 range: Point::new(3, 9)..Point::new(3, 14),
5527 diagnostic: Diagnostic {
5528 severity: DiagnosticSeverity::ERROR,
5529 message: "undefined variable 'BB'".to_string(),
5530 is_disk_based: true,
5531 group_id: 0,
5532 is_primary: true,
5533 ..Default::default()
5534 },
5535 }
5536 ]
5537 );
5538 });
5539 }
5540
5541 #[gpui::test]
5542 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5543 cx.foreground().forbid_parking();
5544
5545 let text = concat!(
5546 "let one = ;\n", //
5547 "let two = \n",
5548 "let three = 3;\n",
5549 );
5550
5551 let fs = FakeFs::new(cx.background());
5552 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5553
5554 let project = Project::test(fs, cx);
5555 let worktree_id = project
5556 .update(cx, |project, cx| {
5557 project.find_or_create_local_worktree("/dir", true, cx)
5558 })
5559 .await
5560 .unwrap()
5561 .0
5562 .read_with(cx, |tree, _| tree.id());
5563
5564 let buffer = project
5565 .update(cx, |project, cx| {
5566 project.open_buffer((worktree_id, "a.rs"), cx)
5567 })
5568 .await
5569 .unwrap();
5570
5571 project.update(cx, |project, cx| {
5572 project
5573 .update_buffer_diagnostics(
5574 &buffer,
5575 vec![
5576 DiagnosticEntry {
5577 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5578 diagnostic: Diagnostic {
5579 severity: DiagnosticSeverity::ERROR,
5580 message: "syntax error 1".to_string(),
5581 ..Default::default()
5582 },
5583 },
5584 DiagnosticEntry {
5585 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5586 diagnostic: Diagnostic {
5587 severity: DiagnosticSeverity::ERROR,
5588 message: "syntax error 2".to_string(),
5589 ..Default::default()
5590 },
5591 },
5592 ],
5593 None,
5594 cx,
5595 )
5596 .unwrap();
5597 });
5598
5599 // An empty range is extended forward to include the following character.
5600 // At the end of a line, an empty range is extended backward to include
5601 // the preceding character.
5602 buffer.read_with(cx, |buffer, _| {
5603 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5604 assert_eq!(
5605 chunks
5606 .iter()
5607 .map(|(s, d)| (s.as_str(), *d))
5608 .collect::<Vec<_>>(),
5609 &[
5610 ("let one = ", None),
5611 (";", Some(DiagnosticSeverity::ERROR)),
5612 ("\nlet two =", None),
5613 (" ", Some(DiagnosticSeverity::ERROR)),
5614 ("\nlet three = 3;\n", None)
5615 ]
5616 );
5617 });
5618 }
5619
5620 #[gpui::test]
5621 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5622 cx.foreground().forbid_parking();
5623
5624 let mut language = Language::new(
5625 LanguageConfig {
5626 name: "Rust".into(),
5627 path_suffixes: vec!["rs".to_string()],
5628 ..Default::default()
5629 },
5630 Some(tree_sitter_rust::language()),
5631 );
5632 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5633
5634 let text = "
5635 fn a() {
5636 f1();
5637 }
5638 fn b() {
5639 f2();
5640 }
5641 fn c() {
5642 f3();
5643 }
5644 "
5645 .unindent();
5646
5647 let fs = FakeFs::new(cx.background());
5648 fs.insert_tree(
5649 "/dir",
5650 json!({
5651 "a.rs": text.clone(),
5652 }),
5653 )
5654 .await;
5655
5656 let project = Project::test(fs, cx);
5657 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5658
5659 let worktree_id = project
5660 .update(cx, |project, cx| {
5661 project.find_or_create_local_worktree("/dir", true, cx)
5662 })
5663 .await
5664 .unwrap()
5665 .0
5666 .read_with(cx, |tree, _| tree.id());
5667
5668 let buffer = project
5669 .update(cx, |project, cx| {
5670 project.open_buffer((worktree_id, "a.rs"), cx)
5671 })
5672 .await
5673 .unwrap();
5674
5675 let mut fake_server = fake_servers.next().await.unwrap();
5676 let lsp_document_version = fake_server
5677 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5678 .await
5679 .text_document
5680 .version;
5681
5682 // Simulate editing the buffer after the language server computes some edits.
5683 buffer.update(cx, |buffer, cx| {
5684 buffer.edit(
5685 [Point::new(0, 0)..Point::new(0, 0)],
5686 "// above first function\n",
5687 cx,
5688 );
5689 buffer.edit(
5690 [Point::new(2, 0)..Point::new(2, 0)],
5691 " // inside first function\n",
5692 cx,
5693 );
5694 buffer.edit(
5695 [Point::new(6, 4)..Point::new(6, 4)],
5696 "// inside second function ",
5697 cx,
5698 );
5699
5700 assert_eq!(
5701 buffer.text(),
5702 "
5703 // above first function
5704 fn a() {
5705 // inside first function
5706 f1();
5707 }
5708 fn b() {
5709 // inside second function f2();
5710 }
5711 fn c() {
5712 f3();
5713 }
5714 "
5715 .unindent()
5716 );
5717 });
5718
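// Interpret the server's edits, which were computed against the pre-edit version
// of the document, as edits on the current buffer contents.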
5719 let edits = project
5720 .update(cx, |project, cx| {
5721 project.edits_from_lsp(
5722 &buffer,
5723 vec![
5724 // replace body of first function
5725 lsp::TextEdit {
5726 range: lsp::Range::new(
5727 lsp::Position::new(0, 0),
5728 lsp::Position::new(3, 0),
5729 ),
5730 new_text: "
5731 fn a() {
5732 f10();
5733 }
5734 "
5735 .unindent(),
5736 },
5737 // edit inside second function
5738 lsp::TextEdit {
5739 range: lsp::Range::new(
5740 lsp::Position::new(4, 6),
5741 lsp::Position::new(4, 6),
5742 ),
5743 new_text: "00".into(),
5744 },
5745 // edit inside third function via two distinct edits
5746 lsp::TextEdit {
5747 range: lsp::Range::new(
5748 lsp::Position::new(7, 5),
5749 lsp::Position::new(7, 5),
5750 ),
5751 new_text: "4000".into(),
5752 },
5753 lsp::TextEdit {
5754 range: lsp::Range::new(
5755 lsp::Position::new(7, 5),
5756 lsp::Position::new(7, 6),
5757 ),
5758 new_text: "".into(),
5759 },
5760 ],
5761 Some(lsp_document_version),
5762 cx,
5763 )
5764 })
5765 .await
5766 .unwrap();
5767
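// Applying the interpreted edits should preserve the manual edits made above
// while incorporating the server's changes.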
5768 buffer.update(cx, |buffer, cx| {
5769 for (range, new_text) in edits {
5770 buffer.edit([range], new_text, cx);
5771 }
5772 assert_eq!(
5773 buffer.text(),
5774 "
5775 // above first function
5776 fn a() {
5777 // inside first function
5778 f10();
5779 }
5780 fn b() {
5781 // inside second function f200();
5782 }
5783 fn c() {
5784 f4000();
5785 }
5786 "
5787 .unindent()
5788 );
5789 });
5790 }
5791
5792 #[gpui::test]
5793 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5794 cx.foreground().forbid_parking();
5795
5796 let text = "
5797 use a::b;
5798 use a::c;
5799
5800 fn f() {
5801 b();
5802 c();
5803 }
5804 "
5805 .unindent();
5806
5807 let fs = FakeFs::new(cx.background());
5808 fs.insert_tree(
5809 "/dir",
5810 json!({
5811 "a.rs": text.clone(),
5812 }),
5813 )
5814 .await;
5815
5816 let project = Project::test(fs, cx);
5817 let worktree_id = project
5818 .update(cx, |project, cx| {
5819 project.find_or_create_local_worktree("/dir", true, cx)
5820 })
5821 .await
5822 .unwrap()
5823 .0
5824 .read_with(cx, |tree, _| tree.id());
5825
5826 let buffer = project
5827 .update(cx, |project, cx| {
5828 project.open_buffer((worktree_id, "a.rs"), cx)
5829 })
5830 .await
5831 .unwrap();
5832
5833 // Simulate the language server sending us a small edit in the form of a very large diff.
5834 // Rust-analyzer does this when performing a merge-imports code action.
5835 let edits = project
5836 .update(cx, |project, cx| {
5837 project.edits_from_lsp(
5838 &buffer,
5839 [
5840 // Replace the first use statement without editing the semicolon.
5841 lsp::TextEdit {
5842 range: lsp::Range::new(
5843 lsp::Position::new(0, 4),
5844 lsp::Position::new(0, 8),
5845 ),
5846 new_text: "a::{b, c}".into(),
5847 },
5848 // Reinsert the remainder of the file between the semicolon and the final
5849 // newline of the file.
5850 lsp::TextEdit {
5851 range: lsp::Range::new(
5852 lsp::Position::new(0, 9),
5853 lsp::Position::new(0, 9),
5854 ),
5855 new_text: "\n\n".into(),
5856 },
5857 lsp::TextEdit {
5858 range: lsp::Range::new(
5859 lsp::Position::new(0, 9),
5860 lsp::Position::new(0, 9),
5861 ),
5862 new_text: "
5863 fn f() {
5864 b();
5865 c();
5866 }"
5867 .unindent(),
5868 },
5869 // Delete everything after the first newline of the file.
5870 lsp::TextEdit {
5871 range: lsp::Range::new(
5872 lsp::Position::new(1, 0),
5873 lsp::Position::new(7, 0),
5874 ),
5875 new_text: "".into(),
5876 },
5877 ],
5878 None,
5879 cx,
5880 )
5881 })
5882 .await
5883 .unwrap();
5884
5885 buffer.update(cx, |buffer, cx| {
5886 let edits = edits
5887 .into_iter()
5888 .map(|(range, text)| {
5889 (
5890 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5891 text,
5892 )
5893 })
5894 .collect::<Vec<_>>();
5895
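// The server's four edits should collapse into two minimal buffer edits:
// rewriting the first import and deleting the second `use` line.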
5896 assert_eq!(
5897 edits,
5898 [
5899 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5900 (Point::new(1, 0)..Point::new(2, 0), "".into())
5901 ]
5902 );
5903
5904 for (range, new_text) in edits {
5905 buffer.edit([range], new_text, cx);
5906 }
5907 assert_eq!(
5908 buffer.text(),
5909 "
5910 use a::{b, c};
5911
5912 fn f() {
5913 b();
5914 c();
5915 }
5916 "
5917 .unindent()
5918 );
5919 });
5920 }
5921
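// Returns the buffer's chunks over `range`, coalescing adjacent chunks that
// share the same diagnostic severity.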
5922 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5923 buffer: &Buffer,
5924 range: Range<T>,
5925 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5926 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5927 for chunk in buffer.snapshot().chunks(range, true) {
5928 if chunks.last().map_or(false, |prev_chunk| {
5929 prev_chunk.1 == chunk.diagnostic_severity
5930 }) {
5931 chunks.last_mut().unwrap().0.push_str(chunk.text);
5932 } else {
5933 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
5934 }
5935 }
5936 chunks
5937 }
5938
5939 #[gpui::test]
5940 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5941 let dir = temp_tree(json!({
5942 "root": {
5943 "dir1": {},
5944 "dir2": {
5945 "dir3": {}
5946 }
5947 }
5948 }));
5949
5950 let project = Project::test(Arc::new(RealFs), cx);
5951 let (tree, _) = project
5952 .update(cx, |project, cx| {
5953 project.find_or_create_local_worktree(&dir.path(), true, cx)
5954 })
5955 .await
5956 .unwrap();
5957
5958 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5959 .await;
5960
5961 let cancel_flag = Default::default();
5962 let results = project
5963 .read_with(cx, |project, cx| {
5964 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5965 })
5966 .await;
5967
5968 assert!(results.is_empty());
5969 }
5970
5971 #[gpui::test]
5972 async fn test_definition(cx: &mut gpui::TestAppContext) {
5973 let mut language = Language::new(
5974 LanguageConfig {
5975 name: "Rust".into(),
5976 path_suffixes: vec!["rs".to_string()],
5977 ..Default::default()
5978 },
5979 Some(tree_sitter_rust::language()),
5980 );
5981 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
5982
5983 let fs = FakeFs::new(cx.background());
5984 fs.insert_tree(
5985 "/dir",
5986 json!({
5987 "a.rs": "const fn a() { A }",
5988 "b.rs": "const y: i32 = crate::a()",
5989 }),
5990 )
5991 .await;
5992
5993 let project = Project::test(fs, cx);
5994 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5995
5996 let (tree, _) = project
5997 .update(cx, |project, cx| {
5998 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5999 })
6000 .await
6001 .unwrap();
6002 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6003 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6004 .await;
6005
6006 let buffer = project
6007 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6008 .await
6009 .unwrap();
6010
6011 let fake_server = fake_servers.next().await.unwrap();
6012 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6013 let params = params.text_document_position_params;
6014 assert_eq!(
6015 params.text_document.uri.to_file_path().unwrap(),
6016 Path::new("/dir/b.rs"),
6017 );
6018 assert_eq!(params.position, lsp::Position::new(0, 22));
6019
6020 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6021 lsp::Location::new(
6022 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6023 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6024 ),
6025 )))
6026 });
6027
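// Request the definition of `a` at offset 22 in b.rs (the call to `crate::a`).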
6028 let mut definitions = project
6029 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6030 .await
6031 .unwrap();
6032
6033 assert_eq!(definitions.len(), 1);
6034 let definition = definitions.pop().unwrap();
6035 cx.update(|cx| {
6036 let target_buffer = definition.buffer.read(cx);
6037 assert_eq!(
6038 target_buffer
6039 .file()
6040 .unwrap()
6041 .as_local()
6042 .unwrap()
6043 .abs_path(cx),
6044 Path::new("/dir/a.rs"),
6045 );
6046 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
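// Opening the definition target added a second worktree for a.rs that is not
// marked as visible.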
6047 assert_eq!(
6048 list_worktrees(&project, cx),
6049 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6050 );
6051
6052 drop(definition);
6053 });
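// Dropping the definition releases the target buffer, so the temporary
// worktree for a.rs goes away.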
6054 cx.read(|cx| {
6055 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6056 });
6057
6058 fn list_worktrees<'a>(
6059 project: &'a ModelHandle<Project>,
6060 cx: &'a AppContext,
6061 ) -> Vec<(&'a Path, bool)> {
6062 project
6063 .read(cx)
6064 .worktrees(cx)
6065 .map(|worktree| {
6066 let worktree = worktree.read(cx);
6067 (
6068 worktree.as_local().unwrap().abs_path().as_ref(),
6069 worktree.is_visible(),
6070 )
6071 })
6072 .collect::<Vec<_>>()
6073 }
6074 }
6075
6076 #[gpui::test]
6077 async fn test_apply_code_action(cx: &mut gpui::TestAppContext) {
6078 let mut language = Language::new(
6079 LanguageConfig {
6080 name: "TypeScript".into(),
6081 path_suffixes: vec!["ts".to_string()],
6082 ..Default::default()
6083 },
6084 None,
6085 );
6086 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6087
6088 let fs = FakeFs::new(cx.background());
6089 fs.insert_tree(
6090 "/dir",
6091 json!({
6092 "a.ts": "",
6093 }),
6094 )
6095 .await;
6096
6097 let project = Project::test(fs, cx);
6098 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6099
6100 let (tree, _) = project
6101 .update(cx, |project, cx| {
6102 project.find_or_create_local_worktree("/dir", true, cx)
6103 })
6104 .await
6105 .unwrap();
6106 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6107 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6108 .await;
6109
6110 let buffer = project
6111 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6112 .await
6113 .unwrap();
6114
6115 let fake_server = fake_language_servers.next().await.unwrap();
6116
6117 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6118 fake_server
6119 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6120 Ok(Some(vec![
6121 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6122 title: "The code action".into(),
6123 command: Some(lsp::Command {
6124 title: "The command".into(),
6125 command: "_the/command".into(),
6126 arguments: Some(vec![json!("the-argument")]),
6127 }),
6128 ..Default::default()
6129 }),
6130 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6131 title: "two".into(),
6132 ..Default::default()
6133 }),
6134 ]))
6135 })
6136 .next()
6137 .await;
6138
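// Apply the first code action. Because it carries a command, the project is
// expected to resolve the action and then execute its command, both of which
// the fake server handles below.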
6139 let action = actions.await.unwrap()[0].clone();
6140 let apply = project.update(cx, |project, cx| {
6141 project.apply_code_action(buffer.clone(), action, true, cx)
6142 });
6143 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6144 |action, _| async move { Ok(action) },
6145 );
6146 fake_server
6147 .handle_request::<lsp::request::ExecuteCommand, _, _>(|_, _| async move {
6149 Ok(Some(json!(null)))
6150 })
6151 .next()
6152 .await;
6153 }
6154
6155 #[gpui::test]
6156 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6157 let fs = FakeFs::new(cx.background());
6158 fs.insert_tree(
6159 "/dir",
6160 json!({
6161 "file1": "the old contents",
6162 }),
6163 )
6164 .await;
6165
6166 let project = Project::test(fs.clone(), cx);
6167 let worktree_id = project
6168 .update(cx, |p, cx| {
6169 p.find_or_create_local_worktree("/dir", true, cx)
6170 })
6171 .await
6172 .unwrap()
6173 .0
6174 .read_with(cx, |tree, _| tree.id());
6175
6176 let buffer = project
6177 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6178 .await
6179 .unwrap();
6180 buffer
6181 .update(cx, |buffer, cx| {
6182 assert_eq!(buffer.text(), "the old contents");
6183 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6184 buffer.save(cx)
6185 })
6186 .await
6187 .unwrap();
6188
6189 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6190 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6191 }
6192
6193 #[gpui::test]
6194 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6195 let fs = FakeFs::new(cx.background());
6196 fs.insert_tree(
6197 "/dir",
6198 json!({
6199 "file1": "the old contents",
6200 }),
6201 )
6202 .await;
6203
6204 let project = Project::test(fs.clone(), cx);
6205 let worktree_id = project
6206 .update(cx, |p, cx| {
6207 p.find_or_create_local_worktree("/dir/file1", true, cx)
6208 })
6209 .await
6210 .unwrap()
6211 .0
6212 .read_with(cx, |tree, _| tree.id());
6213
6214 let buffer = project
6215 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6216 .await
6217 .unwrap();
6218 buffer
6219 .update(cx, |buffer, cx| {
6220 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6221 buffer.save(cx)
6222 })
6223 .await
6224 .unwrap();
6225
6226 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6227 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6228 }
6229
6230 #[gpui::test]
6231 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6232 let fs = FakeFs::new(cx.background());
6233 fs.insert_tree("/dir", json!({})).await;
6234
6235 let project = Project::test(fs.clone(), cx);
6236 let (worktree, _) = project
6237 .update(cx, |project, cx| {
6238 project.find_or_create_local_worktree("/dir", true, cx)
6239 })
6240 .await
6241 .unwrap();
6242 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6243
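// Create an untitled buffer, edit it, and then save it into the worktree as /dir/file1.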
6244 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
6245 buffer.update(cx, |buffer, cx| {
6246 buffer.edit([0..0], "abc", cx);
6247 assert!(buffer.is_dirty());
6248 assert!(!buffer.has_conflict());
6249 });
6250 project
6251 .update(cx, |project, cx| {
6252 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6253 })
6254 .await
6255 .unwrap();
6256 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6257 buffer.read_with(cx, |buffer, cx| {
6258 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6259 assert!(!buffer.is_dirty());
6260 assert!(!buffer.has_conflict());
6261 });
6262
6263 let opened_buffer = project
6264 .update(cx, |project, cx| {
6265 project.open_buffer((worktree_id, "file1"), cx)
6266 })
6267 .await
6268 .unwrap();
6269 assert_eq!(opened_buffer, buffer);
6270 }
6271
6272 #[gpui::test(retries = 5)]
6273 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6274 let dir = temp_tree(json!({
6275 "a": {
6276 "file1": "",
6277 "file2": "",
6278 "file3": "",
6279 },
6280 "b": {
6281 "c": {
6282 "file4": "",
6283 "file5": "",
6284 }
6285 }
6286 }));
6287
6288 let project = Project::test(Arc::new(RealFs), cx);
6289 let rpc = project.read_with(cx, |p, _| p.client.clone());
6290
6291 let (tree, _) = project
6292 .update(cx, |p, cx| {
6293 p.find_or_create_local_worktree(dir.path(), true, cx)
6294 })
6295 .await
6296 .unwrap();
6297 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6298
6299 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6300 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6301 async move { buffer.await.unwrap() }
6302 };
6303 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6304 tree.read_with(cx, |tree, _| {
6305 tree.entry_for_path(path)
6306 .unwrap_or_else(|| panic!("no entry for path {}", path))
6307 .id
6308 })
6309 };
6310
6311 let buffer2 = buffer_for_path("a/file2", cx).await;
6312 let buffer3 = buffer_for_path("a/file3", cx).await;
6313 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6314 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6315
6316 let file2_id = id_for_path("a/file2", &cx);
6317 let file3_id = id_for_path("a/file3", &cx);
6318 let file4_id = id_for_path("b/c/file4", &cx);
6319
6320 // Wait for the initial scan.
6321 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6322 .await;
6323
6324 // Create a remote copy of this worktree.
6325 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6326 let (remote, load_task) = cx.update(|cx| {
6327 Worktree::remote(
6328 1,
6329 1,
6330 initial_snapshot.to_proto(&Default::default(), true),
6331 rpc.clone(),
6332 cx,
6333 )
6334 });
6335 load_task.await;
6336
6337 cx.read(|cx| {
6338 assert!(!buffer2.read(cx).is_dirty());
6339 assert!(!buffer3.read(cx).is_dirty());
6340 assert!(!buffer4.read(cx).is_dirty());
6341 assert!(!buffer5.read(cx).is_dirty());
6342 });
6343
6344 // Rename and delete files and directories.
6345 tree.flush_fs_events(&cx).await;
6346 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6347 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6348 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6349 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6350 tree.flush_fs_events(&cx).await;
6351
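// After the rescan, b/c has moved to d, file5 is gone, and file2 has been
// renamed; entry ids and open buffers should follow the moved files.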
6352 let expected_paths = vec![
6353 "a",
6354 "a/file1",
6355 "a/file2.new",
6356 "b",
6357 "d",
6358 "d/file3",
6359 "d/file4",
6360 ];
6361
6362 cx.read(|app| {
6363 assert_eq!(
6364 tree.read(app)
6365 .paths()
6366 .map(|p| p.to_str().unwrap())
6367 .collect::<Vec<_>>(),
6368 expected_paths
6369 );
6370
6371 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6372 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6373 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6374
6375 assert_eq!(
6376 buffer2.read(app).file().unwrap().path().as_ref(),
6377 Path::new("a/file2.new")
6378 );
6379 assert_eq!(
6380 buffer3.read(app).file().unwrap().path().as_ref(),
6381 Path::new("d/file3")
6382 );
6383 assert_eq!(
6384 buffer4.read(app).file().unwrap().path().as_ref(),
6385 Path::new("d/file4")
6386 );
6387 assert_eq!(
6388 buffer5.read(app).file().unwrap().path().as_ref(),
6389 Path::new("b/c/file5")
6390 );
6391
6392 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6393 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6394 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6395 assert!(buffer5.read(app).file().unwrap().is_deleted());
6396 });
6397
6398 // Update the remote worktree. Check that it becomes consistent with the
6399 // local worktree.
6400 remote.update(cx, |remote, cx| {
6401 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6402 &initial_snapshot,
6403 1,
6404 1,
6405 true,
6406 );
6407 remote
6408 .as_remote_mut()
6409 .unwrap()
6410 .snapshot
6411 .apply_remote_update(update_message)
6412 .unwrap();
6413
6414 assert_eq!(
6415 remote
6416 .paths()
6417 .map(|p| p.to_str().unwrap())
6418 .collect::<Vec<_>>(),
6419 expected_paths
6420 );
6421 });
6422 }
6423
6424 #[gpui::test]
6425 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6426 let fs = FakeFs::new(cx.background());
6427 fs.insert_tree(
6428 "/the-dir",
6429 json!({
6430 "a.txt": "a-contents",
6431 "b.txt": "b-contents",
6432 }),
6433 )
6434 .await;
6435
6436 let project = Project::test(fs.clone(), cx);
6437 let worktree_id = project
6438 .update(cx, |p, cx| {
6439 p.find_or_create_local_worktree("/the-dir", true, cx)
6440 })
6441 .await
6442 .unwrap()
6443 .0
6444 .read_with(cx, |tree, _| tree.id());
6445
6446 // Spawn multiple tasks to open paths, repeating some paths.
6447 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6448 (
6449 p.open_buffer((worktree_id, "a.txt"), cx),
6450 p.open_buffer((worktree_id, "b.txt"), cx),
6451 p.open_buffer((worktree_id, "a.txt"), cx),
6452 )
6453 });
6454
6455 let buffer_a_1 = buffer_a_1.await.unwrap();
6456 let buffer_a_2 = buffer_a_2.await.unwrap();
6457 let buffer_b = buffer_b.await.unwrap();
6458 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6459 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6460
6461 // There is only one buffer per path.
6462 let buffer_a_id = buffer_a_1.id();
6463 assert_eq!(buffer_a_2.id(), buffer_a_id);
6464
6465 // Open the same path again while it is still open.
6466 drop(buffer_a_1);
6467 let buffer_a_3 = project
6468 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6469 .await
6470 .unwrap();
6471
6472 // There's still only one buffer per path.
6473 assert_eq!(buffer_a_3.id(), buffer_a_id);
6474 }
6475
6476 #[gpui::test]
6477 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6478 use std::fs;
6479
6480 let dir = temp_tree(json!({
6481 "file1": "abc",
6482 "file2": "def",
6483 "file3": "ghi",
6484 }));
6485
6486 let project = Project::test(Arc::new(RealFs), cx);
6487 let (worktree, _) = project
6488 .update(cx, |p, cx| {
6489 p.find_or_create_local_worktree(dir.path(), true, cx)
6490 })
6491 .await
6492 .unwrap();
6493 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6494
6495 worktree.flush_fs_events(&cx).await;
6496 worktree
6497 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6498 .await;
6499
6500 let buffer1 = project
6501 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6502 .await
6503 .unwrap();
6504 let events = Rc::new(RefCell::new(Vec::new()));
6505
6506 // initially, the buffer isn't dirty.
6507 buffer1.update(cx, |buffer, cx| {
6508 cx.subscribe(&buffer1, {
6509 let events = events.clone();
6510 move |_, _, event, _| match event {
6511 BufferEvent::Operation(_) => {}
6512 _ => events.borrow_mut().push(event.clone()),
6513 }
6514 })
6515 .detach();
6516
6517 assert!(!buffer.is_dirty());
6518 assert!(events.borrow().is_empty());
6519
6520 buffer.edit(vec![1..2], "", cx);
6521 });
6522
6523 // after the first edit, the buffer is dirty, and emits a dirtied event.
6524 buffer1.update(cx, |buffer, cx| {
6525 assert_eq!(buffer.text(), "ac");
6526 assert!(buffer.is_dirty());
6527 assert_eq!(
6528 *events.borrow(),
6529 &[language::Event::Edited, language::Event::Dirtied]
6530 );
6531 events.borrow_mut().clear();
6532 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6533 });
6534
6535 // after saving, the buffer is not dirty, and emits a saved event.
6536 buffer1.update(cx, |buffer, cx| {
6537 assert!(!buffer.is_dirty());
6538 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6539 events.borrow_mut().clear();
6540
6541 buffer.edit(vec![1..1], "B", cx);
6542 buffer.edit(vec![2..2], "D", cx);
6543 });
6544
6545 // after editing again, the buffer is dirty, and emits another dirty event.
6546 buffer1.update(cx, |buffer, cx| {
6547 assert_eq!(buffer.text(), "aBDc");
6548 assert!(buffer.is_dirty());
6549 assert_eq!(
6550 *events.borrow(),
6551 &[
6552 language::Event::Edited,
6553 language::Event::Dirtied,
6554 language::Event::Edited,
6555 ],
6556 );
6557 events.borrow_mut().clear();
6558
6559 // TODO - currently, after restoring the buffer to its
6560 // previously-saved state, the buffer is still considered dirty.
6561 buffer.edit([1..3], "", cx);
6562 assert_eq!(buffer.text(), "ac");
6563 assert!(buffer.is_dirty());
6564 });
6565
6566 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6567
6568 // When a file is deleted, the buffer is considered dirty.
6569 let events = Rc::new(RefCell::new(Vec::new()));
6570 let buffer2 = project
6571 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
6572 .await
6573 .unwrap();
6574 buffer2.update(cx, |_, cx| {
6575 cx.subscribe(&buffer2, {
6576 let events = events.clone();
6577 move |_, _, event, _| events.borrow_mut().push(event.clone())
6578 })
6579 .detach();
6580 });
6581
6582 fs::remove_file(dir.path().join("file2")).unwrap();
6583 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6584 assert_eq!(
6585 *events.borrow(),
6586 &[language::Event::Dirtied, language::Event::FileHandleChanged]
6587 );
6588
6589 // When a file that is already dirty is deleted, we don't emit a Dirtied event.
6590 let events = Rc::new(RefCell::new(Vec::new()));
6591 let buffer3 = project
6592 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
6593 .await
6594 .unwrap();
6595 buffer3.update(cx, |_, cx| {
6596 cx.subscribe(&buffer3, {
6597 let events = events.clone();
6598 move |_, _, event, _| events.borrow_mut().push(event.clone())
6599 })
6600 .detach();
6601 });
6602
6603 worktree.flush_fs_events(&cx).await;
6604 buffer3.update(cx, |buffer, cx| {
6605 buffer.edit(Some(0..0), "x", cx);
6606 });
6607 events.borrow_mut().clear();
6608 fs::remove_file(dir.path().join("file3")).unwrap();
6609 buffer3
6610 .condition(&cx, |_, _| !events.borrow().is_empty())
6611 .await;
6612 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
6613 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
6614 }
6615
6616 #[gpui::test]
6617 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6618 use std::fs;
6619
6620 let initial_contents = "aaa\nbbbbb\nc\n";
6621 let dir = temp_tree(json!({ "the-file": initial_contents }));
6622
6623 let project = Project::test(Arc::new(RealFs), cx);
6624 let (worktree, _) = project
6625 .update(cx, |p, cx| {
6626 p.find_or_create_local_worktree(dir.path(), true, cx)
6627 })
6628 .await
6629 .unwrap();
6630 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6631
6632 worktree
6633 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6634 .await;
6635
6636 let abs_path = dir.path().join("the-file");
6637 let buffer = project
6638 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
6639 .await
6640 .unwrap();
6641
6642 // TODO
6643 // Add a cursor on each row.
6644 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
6645 // assert!(!buffer.is_dirty());
6646 // buffer.add_selection_set(
6647 // &(0..3)
6648 // .map(|row| Selection {
6649 // id: row as usize,
6650 // start: Point::new(row, 1),
6651 // end: Point::new(row, 1),
6652 // reversed: false,
6653 // goal: SelectionGoal::None,
6654 // })
6655 // .collect::<Vec<_>>(),
6656 // cx,
6657 // )
6658 // });
6659
6660 // Change the file on disk, adding two new lines of text, and removing
6661 // one line.
6662 buffer.read_with(cx, |buffer, _| {
6663 assert!(!buffer.is_dirty());
6664 assert!(!buffer.has_conflict());
6665 });
6666 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
6667 fs::write(&abs_path, new_contents).unwrap();
6668
6669 // Because the buffer was not modified, it is reloaded from disk. Its
6670 // contents are edited according to the diff between the old and new
6671 // file contents.
6672 buffer
6673 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6674 .await;
6675
6676 buffer.update(cx, |buffer, _| {
6677 assert_eq!(buffer.text(), new_contents);
6678 assert!(!buffer.is_dirty());
6679 assert!(!buffer.has_conflict());
6680
6681 // TODO
6682 // let cursor_positions = buffer
6683 // .selection_set(selection_set_id)
6684 // .unwrap()
6685 // .selections::<Point>(&*buffer)
6686 // .map(|selection| {
6687 // assert_eq!(selection.start, selection.end);
6688 // selection.start
6689 // })
6690 // .collect::<Vec<_>>();
6691 // assert_eq!(
6692 // cursor_positions,
6693 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6694 // );
6695 });
6696
6697 // Modify the buffer
6698 buffer.update(cx, |buffer, cx| {
6699 buffer.edit(vec![0..0], " ", cx);
6700 assert!(buffer.is_dirty());
6701 assert!(!buffer.has_conflict());
6702 });
6703
6704 // Change the file on disk again, adding blank lines to the beginning.
6705 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6706
6707 // Because the buffer is modified, it doesn't reload from disk, but is
6708 // marked as having a conflict.
6709 buffer
6710 .condition(&cx, |buffer, _| buffer.has_conflict())
6711 .await;
6712 }
6713
6714 #[gpui::test]
6715 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6716 cx.foreground().forbid_parking();
6717
6718 let fs = FakeFs::new(cx.background());
6719 fs.insert_tree(
6720 "/the-dir",
6721 json!({
6722 "a.rs": "
6723 fn foo(mut v: Vec<usize>) {
6724 for x in &v {
6725 v.push(1);
6726 }
6727 }
6728 "
6729 .unindent(),
6730 }),
6731 )
6732 .await;
6733
6734 let project = Project::test(fs.clone(), cx);
6735 let (worktree, _) = project
6736 .update(cx, |p, cx| {
6737 p.find_or_create_local_worktree("/the-dir", true, cx)
6738 })
6739 .await
6740 .unwrap();
6741 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6742
6743 let buffer = project
6744 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6745 .await
6746 .unwrap();
6747
6748 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
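// Build a diagnostics message containing two groups: a warning ("error 1") with
// one related hint, and an error ("error 2") with two related hints. Each hint
// points back at its primary diagnostic via related_information.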
6749 let message = lsp::PublishDiagnosticsParams {
6750 uri: buffer_uri.clone(),
6751 diagnostics: vec![
6752 lsp::Diagnostic {
6753 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6754 severity: Some(DiagnosticSeverity::WARNING),
6755 message: "error 1".to_string(),
6756 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6757 location: lsp::Location {
6758 uri: buffer_uri.clone(),
6759 range: lsp::Range::new(
6760 lsp::Position::new(1, 8),
6761 lsp::Position::new(1, 9),
6762 ),
6763 },
6764 message: "error 1 hint 1".to_string(),
6765 }]),
6766 ..Default::default()
6767 },
6768 lsp::Diagnostic {
6769 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6770 severity: Some(DiagnosticSeverity::HINT),
6771 message: "error 1 hint 1".to_string(),
6772 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6773 location: lsp::Location {
6774 uri: buffer_uri.clone(),
6775 range: lsp::Range::new(
6776 lsp::Position::new(1, 8),
6777 lsp::Position::new(1, 9),
6778 ),
6779 },
6780 message: "original diagnostic".to_string(),
6781 }]),
6782 ..Default::default()
6783 },
6784 lsp::Diagnostic {
6785 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6786 severity: Some(DiagnosticSeverity::ERROR),
6787 message: "error 2".to_string(),
6788 related_information: Some(vec![
6789 lsp::DiagnosticRelatedInformation {
6790 location: lsp::Location {
6791 uri: buffer_uri.clone(),
6792 range: lsp::Range::new(
6793 lsp::Position::new(1, 13),
6794 lsp::Position::new(1, 15),
6795 ),
6796 },
6797 message: "error 2 hint 1".to_string(),
6798 },
6799 lsp::DiagnosticRelatedInformation {
6800 location: lsp::Location {
6801 uri: buffer_uri.clone(),
6802 range: lsp::Range::new(
6803 lsp::Position::new(1, 13),
6804 lsp::Position::new(1, 15),
6805 ),
6806 },
6807 message: "error 2 hint 2".to_string(),
6808 },
6809 ]),
6810 ..Default::default()
6811 },
6812 lsp::Diagnostic {
6813 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6814 severity: Some(DiagnosticSeverity::HINT),
6815 message: "error 2 hint 1".to_string(),
6816 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6817 location: lsp::Location {
6818 uri: buffer_uri.clone(),
6819 range: lsp::Range::new(
6820 lsp::Position::new(2, 8),
6821 lsp::Position::new(2, 17),
6822 ),
6823 },
6824 message: "original diagnostic".to_string(),
6825 }]),
6826 ..Default::default()
6827 },
6828 lsp::Diagnostic {
6829 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6830 severity: Some(DiagnosticSeverity::HINT),
6831 message: "error 2 hint 2".to_string(),
6832 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6833 location: lsp::Location {
6834 uri: buffer_uri.clone(),
6835 range: lsp::Range::new(
6836 lsp::Position::new(2, 8),
6837 lsp::Position::new(2, 17),
6838 ),
6839 },
6840 message: "original diagnostic".to_string(),
6841 }]),
6842 ..Default::default()
6843 },
6844 ],
6845 version: None,
6846 };
6847
6848 project
6849 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
6850 .unwrap();
6851 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6852
6853 assert_eq!(
6854 buffer
6855 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6856 .collect::<Vec<_>>(),
6857 &[
6858 DiagnosticEntry {
6859 range: Point::new(1, 8)..Point::new(1, 9),
6860 diagnostic: Diagnostic {
6861 severity: DiagnosticSeverity::WARNING,
6862 message: "error 1".to_string(),
6863 group_id: 0,
6864 is_primary: true,
6865 ..Default::default()
6866 }
6867 },
6868 DiagnosticEntry {
6869 range: Point::new(1, 8)..Point::new(1, 9),
6870 diagnostic: Diagnostic {
6871 severity: DiagnosticSeverity::HINT,
6872 message: "error 1 hint 1".to_string(),
6873 group_id: 0,
6874 is_primary: false,
6875 ..Default::default()
6876 }
6877 },
6878 DiagnosticEntry {
6879 range: Point::new(1, 13)..Point::new(1, 15),
6880 diagnostic: Diagnostic {
6881 severity: DiagnosticSeverity::HINT,
6882 message: "error 2 hint 1".to_string(),
6883 group_id: 1,
6884 is_primary: false,
6885 ..Default::default()
6886 }
6887 },
6888 DiagnosticEntry {
6889 range: Point::new(1, 13)..Point::new(1, 15),
6890 diagnostic: Diagnostic {
6891 severity: DiagnosticSeverity::HINT,
6892 message: "error 2 hint 2".to_string(),
6893 group_id: 1,
6894 is_primary: false,
6895 ..Default::default()
6896 }
6897 },
6898 DiagnosticEntry {
6899 range: Point::new(2, 8)..Point::new(2, 17),
6900 diagnostic: Diagnostic {
6901 severity: DiagnosticSeverity::ERROR,
6902 message: "error 2".to_string(),
6903 group_id: 1,
6904 is_primary: true,
6905 ..Default::default()
6906 }
6907 }
6908 ]
6909 );
6910
6911 assert_eq!(
6912 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6913 &[
6914 DiagnosticEntry {
6915 range: Point::new(1, 8)..Point::new(1, 9),
6916 diagnostic: Diagnostic {
6917 severity: DiagnosticSeverity::WARNING,
6918 message: "error 1".to_string(),
6919 group_id: 0,
6920 is_primary: true,
6921 ..Default::default()
6922 }
6923 },
6924 DiagnosticEntry {
6925 range: Point::new(1, 8)..Point::new(1, 9),
6926 diagnostic: Diagnostic {
6927 severity: DiagnosticSeverity::HINT,
6928 message: "error 1 hint 1".to_string(),
6929 group_id: 0,
6930 is_primary: false,
6931 ..Default::default()
6932 }
6933 },
6934 ]
6935 );
6936 assert_eq!(
6937 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6938 &[
6939 DiagnosticEntry {
6940 range: Point::new(1, 13)..Point::new(1, 15),
6941 diagnostic: Diagnostic {
6942 severity: DiagnosticSeverity::HINT,
6943 message: "error 2 hint 1".to_string(),
6944 group_id: 1,
6945 is_primary: false,
6946 ..Default::default()
6947 }
6948 },
6949 DiagnosticEntry {
6950 range: Point::new(1, 13)..Point::new(1, 15),
6951 diagnostic: Diagnostic {
6952 severity: DiagnosticSeverity::HINT,
6953 message: "error 2 hint 2".to_string(),
6954 group_id: 1,
6955 is_primary: false,
6956 ..Default::default()
6957 }
6958 },
6959 DiagnosticEntry {
6960 range: Point::new(2, 8)..Point::new(2, 17),
6961 diagnostic: Diagnostic {
6962 severity: DiagnosticSeverity::ERROR,
6963 message: "error 2".to_string(),
6964 group_id: 1,
6965 is_primary: true,
6966 ..Default::default()
6967 }
6968 }
6969 ]
6970 );
6971 }
6972
6973 #[gpui::test]
6974 async fn test_rename(cx: &mut gpui::TestAppContext) {
6975 cx.foreground().forbid_parking();
6976
6977 let mut language = Language::new(
6978 LanguageConfig {
6979 name: "Rust".into(),
6980 path_suffixes: vec!["rs".to_string()],
6981 ..Default::default()
6982 },
6983 Some(tree_sitter_rust::language()),
6984 );
6985 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6986
6987 let fs = FakeFs::new(cx.background());
6988 fs.insert_tree(
6989 "/dir",
6990 json!({
6991 "one.rs": "const ONE: usize = 1;",
6992 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6993 }),
6994 )
6995 .await;
6996
6997 let project = Project::test(fs.clone(), cx);
6998 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6999
7000 let (tree, _) = project
7001 .update(cx, |project, cx| {
7002 project.find_or_create_local_worktree("/dir", true, cx)
7003 })
7004 .await
7005 .unwrap();
7006 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7007 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7008 .await;
7009
7010 let buffer = project
7011 .update(cx, |project, cx| {
7012 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7013 })
7014 .await
7015 .unwrap();
7016
7017 let fake_server = fake_servers.next().await.unwrap();
7018
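// prepare_rename at offset 7 (inside `ONE`) should yield the symbol's full range, 6..9.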
7019 let response = project.update(cx, |project, cx| {
7020 project.prepare_rename(buffer.clone(), 7, cx)
7021 });
7022 fake_server
7023 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7024 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7025 assert_eq!(params.position, lsp::Position::new(0, 7));
7026 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7027 lsp::Position::new(0, 6),
7028 lsp::Position::new(0, 9),
7029 ))))
7030 })
7031 .next()
7032 .await
7033 .unwrap();
7034 let range = response.await.unwrap().unwrap();
7035 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7036 assert_eq!(range, 6..9);
7037
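// Perform the rename. The fake server responds with a WorkspaceEdit spanning
// both one.rs and two.rs, so the resulting transaction touches two buffers.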
7038 let response = project.update(cx, |project, cx| {
7039 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7040 });
7041 fake_server
7042 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7043 assert_eq!(
7044 params.text_document_position.text_document.uri.as_str(),
7045 "file:///dir/one.rs"
7046 );
7047 assert_eq!(
7048 params.text_document_position.position,
7049 lsp::Position::new(0, 7)
7050 );
7051 assert_eq!(params.new_name, "THREE");
7052 Ok(Some(lsp::WorkspaceEdit {
7053 changes: Some(
7054 [
7055 (
7056 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7057 vec![lsp::TextEdit::new(
7058 lsp::Range::new(
7059 lsp::Position::new(0, 6),
7060 lsp::Position::new(0, 9),
7061 ),
7062 "THREE".to_string(),
7063 )],
7064 ),
7065 (
7066 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7067 vec![
7068 lsp::TextEdit::new(
7069 lsp::Range::new(
7070 lsp::Position::new(0, 24),
7071 lsp::Position::new(0, 27),
7072 ),
7073 "THREE".to_string(),
7074 ),
7075 lsp::TextEdit::new(
7076 lsp::Range::new(
7077 lsp::Position::new(0, 35),
7078 lsp::Position::new(0, 38),
7079 ),
7080 "THREE".to_string(),
7081 ),
7082 ],
7083 ),
7084 ]
7085 .into_iter()
7086 .collect(),
7087 ),
7088 ..Default::default()
7089 }))
7090 })
7091 .next()
7092 .await
7093 .unwrap();
7094 let mut transaction = response.await.unwrap().0;
7095 assert_eq!(transaction.len(), 2);
7096 assert_eq!(
7097 transaction
7098 .remove_entry(&buffer)
7099 .unwrap()
7100 .0
7101 .read_with(cx, |buffer, _| buffer.text()),
7102 "const THREE: usize = 1;"
7103 );
7104 assert_eq!(
7105 transaction
7106 .into_keys()
7107 .next()
7108 .unwrap()
7109 .read_with(cx, |buffer, _| buffer.text()),
7110 "const TWO: usize = one::THREE + one::THREE;"
7111 );
7112 }
7113
7114 #[gpui::test]
7115 async fn test_search(cx: &mut gpui::TestAppContext) {
7116 let fs = FakeFs::new(cx.background());
7117 fs.insert_tree(
7118 "/dir",
7119 json!({
7120 "one.rs": "const ONE: usize = 1;",
7121 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7122 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7123 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7124 }),
7125 )
7126 .await;
7127 let project = Project::test(fs.clone(), cx);
7128 let (tree, _) = project
7129 .update(cx, |project, cx| {
7130 project.find_or_create_local_worktree("/dir", true, cx)
7131 })
7132 .await
7133 .unwrap();
7134 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7135 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7136 .await;
7137
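// Searching the on-disk contents matches the definition of TWO in two.rs and
// the reference in three.rs.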
7138 assert_eq!(
7139 search(&project, SearchQuery::text("TWO", false, true), cx)
7140 .await
7141 .unwrap(),
7142 HashMap::from_iter([
7143 ("two.rs".to_string(), vec![6..9]),
7144 ("three.rs".to_string(), vec![37..40])
7145 ])
7146 );
7147
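// Edit a buffer in memory without saving; subsequent searches should reflect
// the unsaved contents rather than what is on disk.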
7148 let buffer_4 = project
7149 .update(cx, |project, cx| {
7150 project.open_buffer((worktree_id, "four.rs"), cx)
7151 })
7152 .await
7153 .unwrap();
7154 buffer_4.update(cx, |buffer, cx| {
7155 buffer.edit([20..28, 31..43], "two::TWO", cx);
7156 });
7157
7158 assert_eq!(
7159 search(&project, SearchQuery::text("TWO", false, true), cx)
7160 .await
7161 .unwrap(),
7162 HashMap::from_iter([
7163 ("two.rs".to_string(), vec![6..9]),
7164 ("three.rs".to_string(), vec![37..40]),
7165 ("four.rs".to_string(), vec![25..28, 36..39])
7166 ])
7167 );
7168
7169 async fn search(
7170 project: &ModelHandle<Project>,
7171 query: SearchQuery,
7172 cx: &mut gpui::TestAppContext,
7173 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7174 let results = project
7175 .update(cx, |project, cx| project.search(query, cx))
7176 .await?;
7177
7178 Ok(results
7179 .into_iter()
7180 .map(|(buffer, ranges)| {
7181 buffer.read_with(cx, |buffer, _| {
7182 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7183 let ranges = ranges
7184 .into_iter()
7185 .map(|range| range.to_offset(buffer))
7186 .collect::<Vec<_>>();
7187 (path, ranges)
7188 })
7189 })
7190 .collect())
7191 }
7192 }
7193}