1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
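/// Central state for a project: its worktrees, open buffers, collaborators,
/// and language servers. A project is either hosted by the local client or
/// joined from a remote host over RPC (see `ProjectClientState`).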
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
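/// A buffer registered with the project. Buffers are held strongly while the
/// project is shared or remote (so they stay alive for collaborators) and
/// weakly otherwise; `Loading` buffers queue operations that arrive before the
/// buffer itself has finished opening.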
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
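/// Worktrees are held strongly while the project is shared; when sharing
/// stops, non-visible worktrees are downgraded to weak handles so they can be
/// released.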
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
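/// Whether this project is hosted by the local client (and optionally shared
/// with collaborators) or joined from another host over RPC.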
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
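/// A path to a file or directory, relative to the root of one of the
/// project's worktrees.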
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
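/// An identifier for a worktree entry that is unique across the whole project,
/// allocated from a counter shared by all of the project's worktrees.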
224#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
225pub struct ProjectEntryId(usize);
226
227impl ProjectEntryId {
228 pub fn new(counter: &AtomicUsize) -> Self {
229 Self(counter.fetch_add(1, SeqCst))
230 }
231
232 pub fn from_proto(id: u64) -> Self {
233 Self(id as usize)
234 }
235
236 pub fn to_proto(&self) -> u64 {
237 self.0 as u64
238 }
239
240 pub fn to_usize(&self) -> usize {
241 self.0
242 }
243}
244
245impl Project {
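    /// Registers the project's RPC message and request handlers on the given
    /// client so that collaboration messages can be routed to project models.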
246 pub fn init(client: &Arc<Client>) {
247 client.add_model_message_handler(Self::handle_add_collaborator);
248 client.add_model_message_handler(Self::handle_buffer_reloaded);
249 client.add_model_message_handler(Self::handle_buffer_saved);
250 client.add_model_message_handler(Self::handle_start_language_server);
251 client.add_model_message_handler(Self::handle_update_language_server);
252 client.add_model_message_handler(Self::handle_remove_collaborator);
253 client.add_model_message_handler(Self::handle_register_worktree);
254 client.add_model_message_handler(Self::handle_unregister_worktree);
255 client.add_model_message_handler(Self::handle_unshare_project);
256 client.add_model_message_handler(Self::handle_update_buffer_file);
257 client.add_model_message_handler(Self::handle_update_buffer);
258 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
259 client.add_model_message_handler(Self::handle_update_worktree);
260 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
261 client.add_model_request_handler(Self::handle_apply_code_action);
262 client.add_model_request_handler(Self::handle_reload_buffers);
263 client.add_model_request_handler(Self::handle_format_buffers);
264 client.add_model_request_handler(Self::handle_get_code_actions);
265 client.add_model_request_handler(Self::handle_get_completions);
266 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
267 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
269 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
270 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
271 client.add_model_request_handler(Self::handle_search_project);
272 client.add_model_request_handler(Self::handle_get_project_symbols);
273 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
274 client.add_model_request_handler(Self::handle_open_buffer_by_id);
275 client.add_model_request_handler(Self::handle_open_buffer_by_path);
276 client.add_model_request_handler(Self::handle_save_buffer);
277 }
278
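    /// Creates a project hosted by the local client. While the client is
    /// connected, the project keeps itself registered with the server so that
    /// it can later be shared with collaborators. See `Project::test` below
    /// for a minimal construction example.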
279 pub fn local(
280 client: Arc<Client>,
281 user_store: ModelHandle<UserStore>,
282 languages: Arc<LanguageRegistry>,
283 fs: Arc<dyn Fs>,
284 cx: &mut MutableAppContext,
285 ) -> ModelHandle<Self> {
286 cx.add_model(|cx: &mut ModelContext<Self>| {
287 let (remote_id_tx, remote_id_rx) = watch::channel();
288 let _maintain_remote_id_task = cx.spawn_weak({
289 let rpc = client.clone();
290 move |this, mut cx| {
291 async move {
292 let mut status = rpc.status();
293 while let Some(status) = status.next().await {
294 if let Some(this) = this.upgrade(&cx) {
295 if status.is_connected() {
296 this.update(&mut cx, |this, cx| this.register(cx)).await?;
297 } else {
298 this.update(&mut cx, |this, cx| this.unregister(cx));
299 }
300 }
301 }
302 Ok(())
303 }
304 .log_err()
305 }
306 });
307
308 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
309 Self {
310 worktrees: Default::default(),
311 collaborators: Default::default(),
312 opened_buffers: Default::default(),
313 shared_buffers: Default::default(),
314 loading_buffers: Default::default(),
315 loading_local_worktrees: Default::default(),
316 buffer_snapshots: Default::default(),
317 client_state: ProjectClientState::Local {
318 is_shared: false,
319 remote_id_tx,
320 remote_id_rx,
321 _maintain_remote_id_task,
322 },
323 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
324 subscriptions: Vec::new(),
325 active_entry: None,
326 languages,
327 client,
328 user_store,
329 fs,
330 next_entry_id: Default::default(),
331 language_servers: Default::default(),
332 started_language_servers: Default::default(),
333 language_server_statuses: Default::default(),
334 last_workspace_edits_by_language_server: Default::default(),
335 language_server_settings: Default::default(),
336 next_language_server_id: 0,
337 nonce: StdRng::from_entropy().gen(),
338 }
339 })
340 }
341
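    /// Joins the project identified by `remote_id` that another client is
    /// sharing: connects and authenticates, requests the project's worktrees,
    /// language servers, and collaborators, and replicates them locally.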
342 pub async fn remote(
343 remote_id: u64,
344 client: Arc<Client>,
345 user_store: ModelHandle<UserStore>,
346 languages: Arc<LanguageRegistry>,
347 fs: Arc<dyn Fs>,
348 cx: &mut AsyncAppContext,
349 ) -> Result<ModelHandle<Self>> {
350 client.authenticate_and_connect(true, &cx).await?;
351
352 let response = client
353 .request(proto::JoinProject {
354 project_id: remote_id,
355 })
356 .await?;
357
358 let replica_id = response.replica_id as ReplicaId;
359
360 let mut worktrees = Vec::new();
361 for worktree in response.worktrees {
362 let (worktree, load_task) = cx
363 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
364 worktrees.push(worktree);
365 load_task.detach();
366 }
367
368 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
369 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
370 let mut this = Self {
371 worktrees: Vec::new(),
372 loading_buffers: Default::default(),
373 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
374 shared_buffers: Default::default(),
375 loading_local_worktrees: Default::default(),
376 active_entry: None,
377 collaborators: Default::default(),
378 languages,
379 user_store: user_store.clone(),
380 fs,
381 next_entry_id: Default::default(),
382 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
383 client: client.clone(),
384 client_state: ProjectClientState::Remote {
385 sharing_has_stopped: false,
386 remote_id,
387 replica_id,
388 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
389 async move {
390 let mut status = client.status();
391 let is_connected =
392 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change in the status means we were momentarily disconnected.
394 if !is_connected || status.next().await.is_some() {
395 if let Some(this) = this.upgrade(&cx) {
396 this.update(&mut cx, |this, cx| this.project_unshared(cx))
397 }
398 }
399 Ok(())
400 }
401 .log_err()
402 }),
403 },
404 language_servers: Default::default(),
405 started_language_servers: Default::default(),
406 language_server_settings: Default::default(),
407 language_server_statuses: response
408 .language_servers
409 .into_iter()
410 .map(|server| {
411 (
412 server.id as usize,
413 LanguageServerStatus {
414 name: server.name,
415 pending_work: Default::default(),
416 pending_diagnostic_updates: 0,
417 },
418 )
419 })
420 .collect(),
421 last_workspace_edits_by_language_server: Default::default(),
422 next_language_server_id: 0,
423 opened_buffers: Default::default(),
424 buffer_snapshots: Default::default(),
425 nonce: StdRng::from_entropy().gen(),
426 };
427 for worktree in worktrees {
428 this.add_worktree(&worktree, cx);
429 }
430 this
431 });
432
433 let user_ids = response
434 .collaborators
435 .iter()
436 .map(|peer| peer.user_id)
437 .collect();
438 user_store
439 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
440 .await?;
441 let mut collaborators = HashMap::default();
442 for message in response.collaborators {
443 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
444 collaborators.insert(collaborator.peer_id, collaborator);
445 }
446
447 this.update(cx, |this, _| {
448 this.collaborators = collaborators;
449 });
450
451 Ok(this)
452 }
453
454 #[cfg(any(test, feature = "test-support"))]
455 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
456 let languages = Arc::new(LanguageRegistry::test());
457 let http_client = client::test::FakeHttpClient::with_404_response();
458 let client = client::Client::new(http_client.clone());
459 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
460 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
461 }
462
463 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
464 self.opened_buffers
465 .get(&remote_id)
466 .and_then(|buffer| buffer.upgrade(cx))
467 }
468
469 pub fn languages(&self) -> &Arc<LanguageRegistry> {
470 &self.languages
471 }
472
473 #[cfg(any(test, feature = "test-support"))]
474 pub fn check_invariants(&self, cx: &AppContext) {
475 if self.is_local() {
476 let mut worktree_root_paths = HashMap::default();
477 for worktree in self.worktrees(cx) {
478 let worktree = worktree.read(cx);
479 let abs_path = worktree.as_local().unwrap().abs_path().clone();
480 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
481 assert_eq!(
482 prev_worktree_id,
483 None,
484 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
485 abs_path,
486 worktree.id(),
487 prev_worktree_id
488 )
489 }
490 } else {
491 let replica_id = self.replica_id();
492 for buffer in self.opened_buffers.values() {
493 if let Some(buffer) = buffer.upgrade(cx) {
494 let buffer = buffer.read(cx);
495 assert_eq!(
496 buffer.deferred_ops_len(),
497 0,
498 "replica {}, buffer {} has deferred operations",
499 replica_id,
500 buffer.remote_id()
501 );
502 }
503 }
504 }
505 }
506
507 #[cfg(any(test, feature = "test-support"))]
508 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
509 let path = path.into();
510 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
511 self.opened_buffers.iter().any(|(_, buffer)| {
512 if let Some(buffer) = buffer.upgrade(cx) {
513 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
514 if file.worktree == worktree && file.path() == &path.path {
515 return true;
516 }
517 }
518 }
519 false
520 })
521 } else {
522 false
523 }
524 }
525
526 pub fn fs(&self) -> &Arc<dyn Fs> {
527 &self.fs
528 }
529
530 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
531 self.unshare(cx);
532 for worktree in &self.worktrees {
533 if let Some(worktree) = worktree.upgrade(cx) {
534 worktree.update(cx, |worktree, _| {
535 worktree.as_local_mut().unwrap().unregister();
536 });
537 }
538 }
539
540 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
541 *remote_id_tx.borrow_mut() = None;
542 }
543
544 self.subscriptions.clear();
545 }
546
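    /// Registers this local project with the server, records the remote id it
    /// was assigned, and registers each local worktree under that id.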
547 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
548 self.unregister(cx);
549
550 let response = self.client.request(proto::RegisterProject {});
551 cx.spawn(|this, mut cx| async move {
552 let remote_id = response.await?.project_id;
553
554 let mut registrations = Vec::new();
555 this.update(&mut cx, |this, cx| {
556 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
557 *remote_id_tx.borrow_mut() = Some(remote_id);
558 }
559
560 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
561
562 this.subscriptions
563 .push(this.client.add_model_for_remote_entity(remote_id, cx));
564
565 for worktree in &this.worktrees {
566 if let Some(worktree) = worktree.upgrade(cx) {
567 registrations.push(worktree.update(cx, |worktree, cx| {
568 let worktree = worktree.as_local_mut().unwrap();
569 worktree.register(remote_id, cx)
570 }));
571 }
572 }
573 });
574
575 futures::future::try_join_all(registrations).await?;
576 Ok(())
577 })
578 }
579
580 pub fn remote_id(&self) -> Option<u64> {
581 match &self.client_state {
582 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
583 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
584 }
585 }
586
587 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
588 let mut id = None;
589 let mut watch = None;
590 match &self.client_state {
591 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
592 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
593 }
594
595 async move {
596 if let Some(id) = id {
597 return id;
598 }
599 let mut watch = watch.unwrap();
600 loop {
601 let id = *watch.borrow();
602 if let Some(id) = id {
603 return id;
604 }
605 watch.next().await;
606 }
607 }
608 }
609
610 pub fn replica_id(&self) -> ReplicaId {
611 match &self.client_state {
612 ProjectClientState::Local { .. } => 0,
613 ProjectClientState::Remote { replica_id, .. } => *replica_id,
614 }
615 }
616
617 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
618 &self.collaborators
619 }
620
621 pub fn worktrees<'a>(
622 &'a self,
623 cx: &'a AppContext,
624 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
625 self.worktrees
626 .iter()
627 .filter_map(move |worktree| worktree.upgrade(cx))
628 }
629
630 pub fn visible_worktrees<'a>(
631 &'a self,
632 cx: &'a AppContext,
633 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
634 self.worktrees.iter().filter_map(|worktree| {
635 worktree.upgrade(cx).and_then(|worktree| {
636 if worktree.read(cx).is_visible() {
637 Some(worktree)
638 } else {
639 None
640 }
641 })
642 })
643 }
644
645 pub fn worktree_for_id(
646 &self,
647 id: WorktreeId,
648 cx: &AppContext,
649 ) -> Option<ModelHandle<Worktree>> {
650 self.worktrees(cx)
651 .find(|worktree| worktree.read(cx).id() == id)
652 }
653
654 pub fn worktree_for_entry(
655 &self,
656 entry_id: ProjectEntryId,
657 cx: &AppContext,
658 ) -> Option<ModelHandle<Worktree>> {
659 self.worktrees(cx)
660 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
661 }
662
663 pub fn worktree_id_for_entry(
664 &self,
665 entry_id: ProjectEntryId,
666 cx: &AppContext,
667 ) -> Option<WorktreeId> {
668 self.worktree_for_entry(entry_id, cx)
669 .map(|worktree| worktree.read(cx).id())
670 }
671
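    /// Shares this local project with collaborators: open buffers and worktrees
    /// are upgraded to strong handles so they stay alive for guests, and each
    /// worktree is shared under the project's remote id.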
672 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
673 let rpc = self.client.clone();
674 cx.spawn(|this, mut cx| async move {
675 let project_id = this.update(&mut cx, |this, cx| {
676 if let ProjectClientState::Local {
677 is_shared,
678 remote_id_rx,
679 ..
680 } = &mut this.client_state
681 {
682 *is_shared = true;
683
684 for open_buffer in this.opened_buffers.values_mut() {
685 match open_buffer {
686 OpenBuffer::Strong(_) => {}
687 OpenBuffer::Weak(buffer) => {
688 if let Some(buffer) = buffer.upgrade(cx) {
689 *open_buffer = OpenBuffer::Strong(buffer);
690 }
691 }
692 OpenBuffer::Loading(_) => unreachable!(),
693 }
694 }
695
696 for worktree_handle in this.worktrees.iter_mut() {
697 match worktree_handle {
698 WorktreeHandle::Strong(_) => {}
699 WorktreeHandle::Weak(worktree) => {
700 if let Some(worktree) = worktree.upgrade(cx) {
701 *worktree_handle = WorktreeHandle::Strong(worktree);
702 }
703 }
704 }
705 }
706
707 remote_id_rx
708 .borrow()
709 .ok_or_else(|| anyhow!("no project id"))
710 } else {
711 Err(anyhow!("can't share a remote project"))
712 }
713 })?;
714
715 rpc.request(proto::ShareProject { project_id }).await?;
716
717 let mut tasks = Vec::new();
718 this.update(&mut cx, |this, cx| {
719 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
720 worktree.update(cx, |worktree, cx| {
721 let worktree = worktree.as_local_mut().unwrap();
722 tasks.push(worktree.share(project_id, cx));
723 });
724 }
725 });
726 for task in tasks {
727 task.await?;
728 }
729 this.update(&mut cx, |_, cx| cx.notify());
730 Ok(())
731 })
732 }
733
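    /// Stops sharing this local project: collaborators and shared buffers are
    /// cleared, open buffers and non-visible worktrees are downgraded to weak
    /// handles, and the server is notified.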
734 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
735 let rpc = self.client.clone();
736
737 if let ProjectClientState::Local {
738 is_shared,
739 remote_id_rx,
740 ..
741 } = &mut self.client_state
742 {
743 if !*is_shared {
744 return;
745 }
746
747 *is_shared = false;
748 self.collaborators.clear();
749 self.shared_buffers.clear();
750 for worktree_handle in self.worktrees.iter_mut() {
751 if let WorktreeHandle::Strong(worktree) = worktree_handle {
752 let is_visible = worktree.update(cx, |worktree, _| {
753 worktree.as_local_mut().unwrap().unshare();
754 worktree.is_visible()
755 });
756 if !is_visible {
757 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
758 }
759 }
760 }
761
762 for open_buffer in self.opened_buffers.values_mut() {
763 match open_buffer {
764 OpenBuffer::Strong(buffer) => {
765 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
766 }
767 _ => {}
768 }
769 }
770
771 if let Some(project_id) = *remote_id_rx.borrow() {
772 rpc.send(proto::UnshareProject { project_id }).log_err();
773 }
774
775 cx.notify();
776 } else {
777 log::error!("attempted to unshare a remote project");
778 }
779 }
780
781 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
782 if let ProjectClientState::Remote {
783 sharing_has_stopped,
784 ..
785 } = &mut self.client_state
786 {
787 *sharing_has_stopped = true;
788 self.collaborators.clear();
789 cx.notify();
790 }
791 }
792
793 pub fn is_read_only(&self) -> bool {
794 match &self.client_state {
795 ProjectClientState::Local { .. } => false,
796 ProjectClientState::Remote {
797 sharing_has_stopped,
798 ..
799 } => *sharing_has_stopped,
800 }
801 }
802
803 pub fn is_local(&self) -> bool {
804 match &self.client_state {
805 ProjectClientState::Local { .. } => true,
806 ProjectClientState::Remote { .. } => false,
807 }
808 }
809
810 pub fn is_remote(&self) -> bool {
811 !self.is_local()
812 }
813
814 pub fn create_buffer(
815 &mut self,
816 text: &str,
817 language: Option<Arc<Language>>,
818 cx: &mut ModelContext<Self>,
819 ) -> Result<ModelHandle<Buffer>> {
820 if self.is_remote() {
821 return Err(anyhow!("creating buffers as a guest is not supported yet"));
822 }
823
824 let buffer = cx.add_model(|cx| {
825 Buffer::new(self.replica_id(), text, cx)
826 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
827 });
828 self.register_buffer(&buffer, cx)?;
829 Ok(buffer)
830 }
831
832 pub fn open_path(
833 &mut self,
834 path: impl Into<ProjectPath>,
835 cx: &mut ModelContext<Self>,
836 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
837 let task = self.open_buffer(path, cx);
838 cx.spawn_weak(|_, cx| async move {
839 let buffer = task.await?;
840 let project_entry_id = buffer
841 .read_with(&cx, |buffer, cx| {
842 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
843 })
844 .ok_or_else(|| anyhow!("no project entry"))?;
845 Ok((project_entry_id, buffer.into()))
846 })
847 }
848
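    /// Opens a buffer for the given project path, returning the existing buffer
    /// if that path is already open. Concurrent requests for the same path wait
    /// on a single load that is tracked in `loading_buffers`.
    ///
    /// A minimal usage sketch (the tuple-to-`ProjectPath` conversion and the
    /// concrete path are illustrative assumptions, not taken from this file):
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer((worktree_id, "src/main.rs"), cx)
    /// });
    /// let buffer = open_task.await?;
    /// ```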
849 pub fn open_buffer(
850 &mut self,
851 path: impl Into<ProjectPath>,
852 cx: &mut ModelContext<Self>,
853 ) -> Task<Result<ModelHandle<Buffer>>> {
854 let project_path = path.into();
855 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
856 worktree
857 } else {
858 return Task::ready(Err(anyhow!("no such worktree")));
859 };
860
861 // If there is already a buffer for the given path, then return it.
862 let existing_buffer = self.get_open_buffer(&project_path, cx);
863 if let Some(existing_buffer) = existing_buffer {
864 return Task::ready(Ok(existing_buffer));
865 }
866
867 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
868 // If the given path is already being loaded, then wait for that existing
869 // task to complete and return the same buffer.
870 hash_map::Entry::Occupied(e) => e.get().clone(),
871
872 // Otherwise, record the fact that this path is now being loaded.
873 hash_map::Entry::Vacant(entry) => {
874 let (mut tx, rx) = postage::watch::channel();
875 entry.insert(rx.clone());
876
877 let load_buffer = if worktree.read(cx).is_local() {
878 self.open_local_buffer(&project_path.path, &worktree, cx)
879 } else {
880 self.open_remote_buffer(&project_path.path, &worktree, cx)
881 };
882
883 cx.spawn(move |this, mut cx| async move {
884 let load_result = load_buffer.await;
885 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
886 // Record the fact that the buffer is no longer loading.
887 this.loading_buffers.remove(&project_path);
888 let buffer = load_result.map_err(Arc::new)?;
889 Ok(buffer)
890 }));
891 })
892 .detach();
893 rx
894 }
895 };
896
897 cx.foreground().spawn(async move {
898 loop {
899 if let Some(result) = loading_watch.borrow().as_ref() {
900 match result {
901 Ok(buffer) => return Ok(buffer.clone()),
902 Err(error) => return Err(anyhow!("{}", error)),
903 }
904 }
905 loading_watch.next().await;
906 }
907 })
908 }
909
910 fn open_local_buffer(
911 &mut self,
912 path: &Arc<Path>,
913 worktree: &ModelHandle<Worktree>,
914 cx: &mut ModelContext<Self>,
915 ) -> Task<Result<ModelHandle<Buffer>>> {
916 let load_buffer = worktree.update(cx, |worktree, cx| {
917 let worktree = worktree.as_local_mut().unwrap();
918 worktree.load_buffer(path, cx)
919 });
920 cx.spawn(|this, mut cx| async move {
921 let buffer = load_buffer.await?;
922 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
923 Ok(buffer)
924 })
925 }
926
927 fn open_remote_buffer(
928 &mut self,
929 path: &Arc<Path>,
930 worktree: &ModelHandle<Worktree>,
931 cx: &mut ModelContext<Self>,
932 ) -> Task<Result<ModelHandle<Buffer>>> {
933 let rpc = self.client.clone();
934 let project_id = self.remote_id().unwrap();
935 let remote_worktree_id = worktree.read(cx).id();
936 let path = path.clone();
937 let path_string = path.to_string_lossy().to_string();
938 cx.spawn(|this, mut cx| async move {
939 let response = rpc
940 .request(proto::OpenBufferByPath {
941 project_id,
942 worktree_id: remote_worktree_id.to_proto(),
943 path: path_string,
944 })
945 .await?;
946 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
947 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
948 .await
949 })
950 }
951
952 fn open_local_buffer_via_lsp(
953 &mut self,
954 abs_path: lsp::Url,
955 lsp_adapter: Arc<dyn LspAdapter>,
956 lsp_server: Arc<LanguageServer>,
957 cx: &mut ModelContext<Self>,
958 ) -> Task<Result<ModelHandle<Buffer>>> {
959 cx.spawn(|this, mut cx| async move {
960 let abs_path = abs_path
961 .to_file_path()
962 .map_err(|_| anyhow!("can't convert URI to path"))?;
963 let (worktree, relative_path) = if let Some(result) =
964 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
965 {
966 result
967 } else {
968 let worktree = this
969 .update(&mut cx, |this, cx| {
970 this.create_local_worktree(&abs_path, false, cx)
971 })
972 .await?;
973 this.update(&mut cx, |this, cx| {
974 this.language_servers.insert(
975 (worktree.read(cx).id(), lsp_adapter.name()),
976 (lsp_adapter, lsp_server),
977 );
978 });
979 (worktree, PathBuf::new())
980 };
981
982 let project_path = ProjectPath {
983 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
984 path: relative_path.into(),
985 };
986 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
987 .await
988 })
989 }
990
991 pub fn open_buffer_by_id(
992 &mut self,
993 id: u64,
994 cx: &mut ModelContext<Self>,
995 ) -> Task<Result<ModelHandle<Buffer>>> {
996 if let Some(buffer) = self.buffer_for_id(id, cx) {
997 Task::ready(Ok(buffer))
998 } else if self.is_local() {
999 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1000 } else if let Some(project_id) = self.remote_id() {
1001 let request = self
1002 .client
1003 .request(proto::OpenBufferById { project_id, id });
1004 cx.spawn(|this, mut cx| async move {
1005 let buffer = request
1006 .await?
1007 .buffer
1008 .ok_or_else(|| anyhow!("invalid buffer"))?;
1009 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1010 .await
1011 })
1012 } else {
1013 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1014 }
1015 }
1016
1017 pub fn save_buffer_as(
1018 &mut self,
1019 buffer: ModelHandle<Buffer>,
1020 abs_path: PathBuf,
1021 cx: &mut ModelContext<Project>,
1022 ) -> Task<Result<()>> {
1023 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1024 let old_path =
1025 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1026 cx.spawn(|this, mut cx| async move {
1027 if let Some(old_path) = old_path {
1028 this.update(&mut cx, |this, cx| {
1029 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1030 });
1031 }
1032 let (worktree, path) = worktree_task.await?;
1033 worktree
1034 .update(&mut cx, |worktree, cx| {
1035 worktree
1036 .as_local_mut()
1037 .unwrap()
1038 .save_buffer_as(buffer.clone(), path, cx)
1039 })
1040 .await?;
1041 this.update(&mut cx, |this, cx| {
1042 this.assign_language_to_buffer(&buffer, cx);
1043 this.register_buffer_with_language_server(&buffer, cx);
1044 });
1045 Ok(())
1046 })
1047 }
1048
1049 pub fn get_open_buffer(
1050 &mut self,
1051 path: &ProjectPath,
1052 cx: &mut ModelContext<Self>,
1053 ) -> Option<ModelHandle<Buffer>> {
1054 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1055 self.opened_buffers.values().find_map(|buffer| {
1056 let buffer = buffer.upgrade(cx)?;
1057 let file = File::from_dyn(buffer.read(cx).file())?;
1058 if file.worktree == worktree && file.path() == &path.path {
1059 Some(buffer)
1060 } else {
1061 None
1062 }
1063 })
1064 }
1065
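    /// Adds a newly opened buffer to `opened_buffers`, applies any operations
    /// that arrived while it was loading, subscribes to its events, and
    /// registers it with the appropriate language server.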
1066 fn register_buffer(
1067 &mut self,
1068 buffer: &ModelHandle<Buffer>,
1069 cx: &mut ModelContext<Self>,
1070 ) -> Result<()> {
1071 let remote_id = buffer.read(cx).remote_id();
1072 let open_buffer = if self.is_remote() || self.is_shared() {
1073 OpenBuffer::Strong(buffer.clone())
1074 } else {
1075 OpenBuffer::Weak(buffer.downgrade())
1076 };
1077
1078 match self.opened_buffers.insert(remote_id, open_buffer) {
1079 None => {}
1080 Some(OpenBuffer::Loading(operations)) => {
1081 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1082 }
1083 Some(OpenBuffer::Weak(existing_handle)) => {
1084 if existing_handle.upgrade(cx).is_some() {
1085 Err(anyhow!(
1086 "already registered buffer with remote id {}",
1087 remote_id
1088 ))?
1089 }
1090 }
1091 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1092 "already registered buffer with remote id {}",
1093 remote_id
1094 ))?,
1095 }
1096 cx.subscribe(buffer, |this, buffer, event, cx| {
1097 this.on_buffer_event(buffer, event, cx);
1098 })
1099 .detach();
1100
1101 self.assign_language_to_buffer(buffer, cx);
1102 self.register_buffer_with_language_server(buffer, cx);
1103 cx.observe_release(buffer, |this, buffer, cx| {
1104 if let Some(file) = File::from_dyn(buffer.file()) {
1105 if file.is_local() {
1106 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1107 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1108 server
1109 .notify::<lsp::notification::DidCloseTextDocument>(
1110 lsp::DidCloseTextDocumentParams {
1111 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1112 },
1113 )
1114 .log_err();
1115 }
1116 }
1117 }
1118 })
1119 .detach();
1120
1121 Ok(())
1122 }
1123
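    /// Tells the buffer's language server that the file is open (via
    /// `textDocument/didOpen`), seeds the buffer with any diagnostics already
    /// known for its path, and records the snapshot that later
    /// `textDocument/didChange` notifications will be diffed against.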
1124 fn register_buffer_with_language_server(
1125 &mut self,
1126 buffer_handle: &ModelHandle<Buffer>,
1127 cx: &mut ModelContext<Self>,
1128 ) {
1129 let buffer = buffer_handle.read(cx);
1130 let buffer_id = buffer.remote_id();
1131 if let Some(file) = File::from_dyn(buffer.file()) {
1132 if file.is_local() {
1133 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1134 let initial_snapshot = buffer.text_snapshot();
1135 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
1136
1137 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1138 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1139 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1140 .log_err();
1141 }
1142 }
1143
1144 if let Some((_, server)) = language_server {
1145 server
1146 .notify::<lsp::notification::DidOpenTextDocument>(
1147 lsp::DidOpenTextDocumentParams {
1148 text_document: lsp::TextDocumentItem::new(
1149 uri,
1150 Default::default(),
1151 0,
1152 initial_snapshot.text(),
1153 ),
1154 }
1155 .clone(),
1156 )
1157 .log_err();
1158 buffer_handle.update(cx, |buffer, cx| {
1159 buffer.set_completion_triggers(
1160 server
1161 .capabilities()
1162 .completion_provider
1163 .as_ref()
1164 .and_then(|provider| provider.trigger_characters.clone())
1165 .unwrap_or(Vec::new()),
1166 cx,
1167 )
1168 });
1169 self.buffer_snapshots
1170 .insert(buffer_id, vec![(0, initial_snapshot)]);
1171 }
1172 }
1173 }
1174 }
1175
1176 fn unregister_buffer_from_language_server(
1177 &mut self,
1178 buffer: &ModelHandle<Buffer>,
1179 old_path: PathBuf,
1180 cx: &mut ModelContext<Self>,
1181 ) {
1182 buffer.update(cx, |buffer, cx| {
1183 buffer.update_diagnostics(Default::default(), cx);
1184 self.buffer_snapshots.remove(&buffer.remote_id());
1185 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1186 language_server
1187 .notify::<lsp::notification::DidCloseTextDocument>(
1188 lsp::DidCloseTextDocumentParams {
1189 text_document: lsp::TextDocumentIdentifier::new(
1190 lsp::Url::from_file_path(old_path).unwrap(),
1191 ),
1192 },
1193 )
1194 .log_err();
1195 }
1196 });
1197 }
1198
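    /// Handles buffer events: operations are forwarded to collaborators over
    /// RPC, edits become incremental `textDocument/didChange` notifications for
    /// the buffer's language server, and saves are announced via
    /// `textDocument/didSave`.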
1199 fn on_buffer_event(
1200 &mut self,
1201 buffer: ModelHandle<Buffer>,
1202 event: &BufferEvent,
1203 cx: &mut ModelContext<Self>,
1204 ) -> Option<()> {
1205 match event {
1206 BufferEvent::Operation(operation) => {
1207 let project_id = self.remote_id()?;
1208 let request = self.client.request(proto::UpdateBuffer {
1209 project_id,
1210 buffer_id: buffer.read(cx).remote_id(),
1211 operations: vec![language::proto::serialize_operation(&operation)],
1212 });
1213 cx.background().spawn(request).detach_and_log_err(cx);
1214 }
1215 BufferEvent::Edited { .. } => {
1216 let (_, language_server) = self
1217 .language_server_for_buffer(buffer.read(cx), cx)?
1218 .clone();
1219 let buffer = buffer.read(cx);
1220 let file = File::from_dyn(buffer.file())?;
1221 let abs_path = file.as_local()?.abs_path(cx);
1222 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1223 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1224 let (version, prev_snapshot) = buffer_snapshots.last()?;
1225 let next_snapshot = buffer.text_snapshot();
1226 let next_version = version + 1;
1227
1228 let content_changes = buffer
1229 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1230 .map(|edit| {
1231 let edit_start = edit.new.start.0;
1232 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1233 let new_text = next_snapshot
1234 .text_for_range(edit.new.start.1..edit.new.end.1)
1235 .collect();
1236 lsp::TextDocumentContentChangeEvent {
1237 range: Some(lsp::Range::new(
1238 point_to_lsp(edit_start),
1239 point_to_lsp(edit_end),
1240 )),
1241 range_length: None,
1242 text: new_text,
1243 }
1244 })
1245 .collect();
1246
1247 buffer_snapshots.push((next_version, next_snapshot));
1248
1249 language_server
1250 .notify::<lsp::notification::DidChangeTextDocument>(
1251 lsp::DidChangeTextDocumentParams {
1252 text_document: lsp::VersionedTextDocumentIdentifier::new(
1253 uri,
1254 next_version,
1255 ),
1256 content_changes,
1257 },
1258 )
1259 .log_err();
1260 }
1261 BufferEvent::Saved => {
1262 let file = File::from_dyn(buffer.read(cx).file())?;
1263 let worktree_id = file.worktree_id(cx);
1264 let abs_path = file.as_local()?.abs_path(cx);
1265 let text_document = lsp::TextDocumentIdentifier {
1266 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1267 };
1268
1269 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1270 server
1271 .notify::<lsp::notification::DidSaveTextDocument>(
1272 lsp::DidSaveTextDocumentParams {
1273 text_document: text_document.clone(),
1274 text: None,
1275 },
1276 )
1277 .log_err();
1278 }
1279 }
1280 _ => {}
1281 }
1282
1283 None
1284 }
1285
1286 fn language_servers_for_worktree(
1287 &self,
1288 worktree_id: WorktreeId,
1289 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1290 self.language_servers.iter().filter_map(
1291 move |((language_server_worktree_id, _), server)| {
1292 if *language_server_worktree_id == worktree_id {
1293 Some(server)
1294 } else {
1295 None
1296 }
1297 },
1298 )
1299 }
1300
1301 fn assign_language_to_buffer(
1302 &mut self,
1303 buffer: &ModelHandle<Buffer>,
1304 cx: &mut ModelContext<Self>,
1305 ) -> Option<()> {
1306 // If the buffer has a language, set it and start the language server if we haven't already.
1307 let full_path = buffer.read(cx).file()?.full_path(cx);
1308 let language = self.languages.select_language(&full_path)?;
1309 buffer.update(cx, |buffer, cx| {
1310 buffer.set_language(Some(language.clone()), cx);
1311 });
1312
1313 let file = File::from_dyn(buffer.read(cx).file())?;
1314 let worktree = file.worktree.read(cx).as_local()?;
1315 let worktree_id = worktree.id();
1316 let worktree_abs_path = worktree.abs_path().clone();
1317 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1318
1319 None
1320 }
1321
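    /// Starts the language server for the given language and worktree if it
    /// hasn't been started already, wiring up its diagnostics, progress,
    /// configuration, and workspace-edit handlers, and then announces every
    /// matching buffer that is already open.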
1322 fn start_language_server(
1323 &mut self,
1324 worktree_id: WorktreeId,
1325 worktree_path: Arc<Path>,
1326 language: Arc<Language>,
1327 cx: &mut ModelContext<Self>,
1328 ) {
1329 let adapter = if let Some(adapter) = language.lsp_adapter() {
1330 adapter
1331 } else {
1332 return;
1333 };
1334 let key = (worktree_id, adapter.name());
1335 self.started_language_servers
1336 .entry(key.clone())
1337 .or_insert_with(|| {
1338 let server_id = post_inc(&mut self.next_language_server_id);
1339 let language_server = self.languages.start_language_server(
1340 server_id,
1341 language.clone(),
1342 worktree_path,
1343 self.client.http_client(),
1344 cx,
1345 );
1346 cx.spawn_weak(|this, mut cx| async move {
1347 let language_server = language_server?.await.log_err()?;
1348 let language_server = language_server
1349 .initialize(adapter.initialization_options())
1350 .await
1351 .log_err()?;
1352 let this = this.upgrade(&cx)?;
1353 let disk_based_diagnostics_progress_token =
1354 adapter.disk_based_diagnostics_progress_token();
1355
1356 language_server
1357 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1358 let this = this.downgrade();
1359 let adapter = adapter.clone();
1360 move |params, mut cx| {
1361 if let Some(this) = this.upgrade(&cx) {
1362 this.update(&mut cx, |this, cx| {
1363 this.on_lsp_diagnostics_published(
1364 server_id,
1365 params,
1366 &adapter,
1367 disk_based_diagnostics_progress_token,
1368 cx,
1369 );
1370 });
1371 }
1372 }
1373 })
1374 .detach();
1375
1376 language_server
1377 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1378 let settings = this
1379 .read_with(&cx, |this, _| this.language_server_settings.clone());
1380 move |params, _| {
1381 let settings = settings.lock().clone();
1382 async move {
1383 Ok(params
1384 .items
1385 .into_iter()
1386 .map(|item| {
1387 if let Some(section) = &item.section {
1388 settings
1389 .get(section)
1390 .cloned()
1391 .unwrap_or(serde_json::Value::Null)
1392 } else {
1393 settings.clone()
1394 }
1395 })
1396 .collect())
1397 }
1398 }
1399 })
1400 .detach();
1401
1402 language_server
1403 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1404 let this = this.downgrade();
1405 let adapter = adapter.clone();
1406 let language_server = language_server.clone();
1407 move |params, cx| {
1408 Self::on_lsp_workspace_edit(
1409 this,
1410 params,
1411 server_id,
1412 adapter.clone(),
1413 language_server.clone(),
1414 cx,
1415 )
1416 }
1417 })
1418 .detach();
1419
1420 language_server
1421 .on_notification::<lsp::notification::Progress, _>({
1422 let this = this.downgrade();
1423 move |params, mut cx| {
1424 if let Some(this) = this.upgrade(&cx) {
1425 this.update(&mut cx, |this, cx| {
1426 this.on_lsp_progress(
1427 params,
1428 server_id,
1429 disk_based_diagnostics_progress_token,
1430 cx,
1431 );
1432 });
1433 }
1434 }
1435 })
1436 .detach();
1437
1438 this.update(&mut cx, |this, cx| {
1439 this.language_servers
1440 .insert(key.clone(), (adapter, language_server.clone()));
1441 this.language_server_statuses.insert(
1442 server_id,
1443 LanguageServerStatus {
1444 name: language_server.name().to_string(),
1445 pending_work: Default::default(),
1446 pending_diagnostic_updates: 0,
1447 },
1448 );
1449 language_server
1450 .notify::<lsp::notification::DidChangeConfiguration>(
1451 lsp::DidChangeConfigurationParams {
1452 settings: this.language_server_settings.lock().clone(),
1453 },
1454 )
1455 .ok();
1456
1457 if let Some(project_id) = this.remote_id() {
1458 this.client
1459 .send(proto::StartLanguageServer {
1460 project_id,
1461 server: Some(proto::LanguageServer {
1462 id: server_id as u64,
1463 name: language_server.name().to_string(),
1464 }),
1465 })
1466 .log_err();
1467 }
1468
1469 // Tell the language server about every open buffer in the worktree that matches the language.
1470 for buffer in this.opened_buffers.values() {
1471 if let Some(buffer_handle) = buffer.upgrade(cx) {
1472 let buffer = buffer_handle.read(cx);
1473 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1474 file
1475 } else {
1476 continue;
1477 };
1478 let language = if let Some(language) = buffer.language() {
1479 language
1480 } else {
1481 continue;
1482 };
1483 if file.worktree.read(cx).id() != key.0
1484 || language.lsp_adapter().map(|a| a.name())
1485 != Some(key.1.clone())
1486 {
1487 continue;
1488 }
1489
1490 let file = file.as_local()?;
1491 let versions = this
1492 .buffer_snapshots
1493 .entry(buffer.remote_id())
1494 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1495 let (version, initial_snapshot) = versions.last().unwrap();
1496 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1497 language_server
1498 .notify::<lsp::notification::DidOpenTextDocument>(
1499 lsp::DidOpenTextDocumentParams {
1500 text_document: lsp::TextDocumentItem::new(
1501 uri,
1502 Default::default(),
1503 *version,
1504 initial_snapshot.text(),
1505 ),
1506 },
1507 )
1508 .log_err()?;
1509 buffer_handle.update(cx, |buffer, cx| {
1510 buffer.set_completion_triggers(
1511 language_server
1512 .capabilities()
1513 .completion_provider
1514 .as_ref()
1515 .and_then(|provider| {
1516 provider.trigger_characters.clone()
1517 })
1518 .unwrap_or(Vec::new()),
1519 cx,
1520 )
1521 });
1522 }
1523 }
1524
1525 cx.notify();
1526 Some(())
1527 });
1528
1529 Some(language_server)
1530 })
1531 });
1532 }
1533
1534 pub fn restart_language_servers_for_buffers(
1535 &mut self,
1536 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1537 cx: &mut ModelContext<Self>,
1538 ) -> Option<()> {
1539 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1540 .into_iter()
1541 .filter_map(|buffer| {
1542 let file = File::from_dyn(buffer.read(cx).file())?;
1543 let worktree = file.worktree.read(cx).as_local()?;
1544 let worktree_id = worktree.id();
1545 let worktree_abs_path = worktree.abs_path().clone();
1546 let full_path = file.full_path(cx);
1547 Some((worktree_id, worktree_abs_path, full_path))
1548 })
1549 .collect();
1550 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1551 let language = self.languages.select_language(&full_path)?;
1552 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1553 }
1554
1555 None
1556 }
1557
1558 fn restart_language_server(
1559 &mut self,
1560 worktree_id: WorktreeId,
1561 worktree_path: Arc<Path>,
1562 language: Arc<Language>,
1563 cx: &mut ModelContext<Self>,
1564 ) {
1565 let adapter = if let Some(adapter) = language.lsp_adapter() {
1566 adapter
1567 } else {
1568 return;
1569 };
1570 let key = (worktree_id, adapter.name());
1571 let server_to_shutdown = self.language_servers.remove(&key);
1572 self.started_language_servers.remove(&key);
1573 server_to_shutdown
1574 .as_ref()
1575 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1576 cx.spawn_weak(|this, mut cx| async move {
1577 if let Some(this) = this.upgrade(&cx) {
1578 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1579 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1580 shutdown_task.await;
1581 }
1582 }
1583
1584 this.update(&mut cx, |this, cx| {
1585 this.start_language_server(worktree_id, worktree_path, language, cx);
1586 });
1587 }
1588 })
1589 .detach();
1590 }
1591
1592 fn on_lsp_diagnostics_published(
1593 &mut self,
1594 server_id: usize,
1595 mut params: lsp::PublishDiagnosticsParams,
1596 adapter: &Arc<dyn LspAdapter>,
1597 disk_based_diagnostics_progress_token: Option<&str>,
1598 cx: &mut ModelContext<Self>,
1599 ) {
1600 adapter.process_diagnostics(&mut params);
1601 if disk_based_diagnostics_progress_token.is_none() {
1602 self.disk_based_diagnostics_started(cx);
1603 self.broadcast_language_server_update(
1604 server_id,
1605 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1606 proto::LspDiskBasedDiagnosticsUpdating {},
1607 ),
1608 );
1609 }
1610 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1611 .log_err();
1612 if disk_based_diagnostics_progress_token.is_none() {
1613 self.disk_based_diagnostics_finished(cx);
1614 self.broadcast_language_server_update(
1615 server_id,
1616 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1617 proto::LspDiskBasedDiagnosticsUpdated {},
1618 ),
1619 );
1620 }
1621 }
1622
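    /// Handles `$/progress` notifications. Work tagged with the adapter's
    /// disk-based diagnostics token is tracked with a pending-update counter
    /// and surfaced as started/finished events; all other work is recorded in
    /// the server's status and broadcast to collaborators.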
1623 fn on_lsp_progress(
1624 &mut self,
1625 progress: lsp::ProgressParams,
1626 server_id: usize,
1627 disk_based_diagnostics_progress_token: Option<&str>,
1628 cx: &mut ModelContext<Self>,
1629 ) {
1630 let token = match progress.token {
1631 lsp::NumberOrString::String(token) => token,
1632 lsp::NumberOrString::Number(token) => {
1633 log::info!("skipping numeric progress token {}", token);
1634 return;
1635 }
1636 };
1637 let progress = match progress.value {
1638 lsp::ProgressParamsValue::WorkDone(value) => value,
1639 };
1640 let language_server_status =
1641 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1642 status
1643 } else {
1644 return;
1645 };
1646 match progress {
1647 lsp::WorkDoneProgress::Begin(_) => {
1648 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1649 language_server_status.pending_diagnostic_updates += 1;
1650 if language_server_status.pending_diagnostic_updates == 1 {
1651 self.disk_based_diagnostics_started(cx);
1652 self.broadcast_language_server_update(
1653 server_id,
1654 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1655 proto::LspDiskBasedDiagnosticsUpdating {},
1656 ),
1657 );
1658 }
1659 } else {
1660 self.on_lsp_work_start(server_id, token.clone(), cx);
1661 self.broadcast_language_server_update(
1662 server_id,
1663 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1664 token,
1665 }),
1666 );
1667 }
1668 }
1669 lsp::WorkDoneProgress::Report(report) => {
1670 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1671 self.on_lsp_work_progress(
1672 server_id,
1673 token.clone(),
1674 LanguageServerProgress {
1675 message: report.message.clone(),
1676 percentage: report.percentage.map(|p| p as usize),
1677 last_update_at: Instant::now(),
1678 },
1679 cx,
1680 );
1681 self.broadcast_language_server_update(
1682 server_id,
1683 proto::update_language_server::Variant::WorkProgress(
1684 proto::LspWorkProgress {
1685 token,
1686 message: report.message,
1687 percentage: report.percentage.map(|p| p as u32),
1688 },
1689 ),
1690 );
1691 }
1692 }
1693 lsp::WorkDoneProgress::End(_) => {
1694 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1695 language_server_status.pending_diagnostic_updates -= 1;
1696 if language_server_status.pending_diagnostic_updates == 0 {
1697 self.disk_based_diagnostics_finished(cx);
1698 self.broadcast_language_server_update(
1699 server_id,
1700 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1701 proto::LspDiskBasedDiagnosticsUpdated {},
1702 ),
1703 );
1704 }
1705 } else {
1706 self.on_lsp_work_end(server_id, token.clone(), cx);
1707 self.broadcast_language_server_update(
1708 server_id,
1709 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1710 token,
1711 }),
1712 );
1713 }
1714 }
1715 }
1716 }
1717
1718 fn on_lsp_work_start(
1719 &mut self,
1720 language_server_id: usize,
1721 token: String,
1722 cx: &mut ModelContext<Self>,
1723 ) {
1724 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1725 status.pending_work.insert(
1726 token,
1727 LanguageServerProgress {
1728 message: None,
1729 percentage: None,
1730 last_update_at: Instant::now(),
1731 },
1732 );
1733 cx.notify();
1734 }
1735 }
1736
1737 fn on_lsp_work_progress(
1738 &mut self,
1739 language_server_id: usize,
1740 token: String,
1741 progress: LanguageServerProgress,
1742 cx: &mut ModelContext<Self>,
1743 ) {
1744 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1745 status.pending_work.insert(token, progress);
1746 cx.notify();
1747 }
1748 }
1749
1750 fn on_lsp_work_end(
1751 &mut self,
1752 language_server_id: usize,
1753 token: String,
1754 cx: &mut ModelContext<Self>,
1755 ) {
1756 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1757 status.pending_work.remove(&token);
1758 cx.notify();
1759 }
1760 }
1761
1762 async fn on_lsp_workspace_edit(
1763 this: WeakModelHandle<Self>,
1764 params: lsp::ApplyWorkspaceEditParams,
1765 server_id: usize,
1766 adapter: Arc<dyn LspAdapter>,
1767 language_server: Arc<LanguageServer>,
1768 mut cx: AsyncAppContext,
1769 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1770 let this = this
1771 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1773 let transaction = Self::deserialize_workspace_edit(
1774 this.clone(),
1775 params.edit,
1776 true,
1777 adapter.clone(),
1778 language_server.clone(),
1779 &mut cx,
1780 )
1781 .await
1782 .log_err();
1783 this.update(&mut cx, |this, _| {
1784 if let Some(transaction) = transaction {
1785 this.last_workspace_edits_by_language_server
1786 .insert(server_id, transaction);
1787 }
1788 });
1789 Ok(lsp::ApplyWorkspaceEditResponse {
1790 applied: true,
1791 failed_change: None,
1792 failure_reason: None,
1793 })
1794 }
1795
1796 fn broadcast_language_server_update(
1797 &self,
1798 language_server_id: usize,
1799 event: proto::update_language_server::Variant,
1800 ) {
1801 if let Some(project_id) = self.remote_id() {
1802 self.client
1803 .send(proto::UpdateLanguageServer {
1804 project_id,
1805 language_server_id: language_server_id as u64,
1806 variant: Some(event),
1807 })
1808 .log_err();
1809 }
1810 }
1811
1812 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1813 for (_, server) in self.language_servers.values() {
1814 server
1815 .notify::<lsp::notification::DidChangeConfiguration>(
1816 lsp::DidChangeConfigurationParams {
1817 settings: settings.clone(),
1818 },
1819 )
1820 .ok();
1821 }
1822 *self.language_server_settings.lock() = settings;
1823 }
1824
1825 pub fn language_server_statuses(
1826 &self,
1827 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1828 self.language_server_statuses.values()
1829 }
1830
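    /// Converts an LSP `publishDiagnostics` notification into diagnostic
    /// entries: each primary diagnostic receives a fresh group id, its related
    /// information becomes supporting entries in the same group, and the result
    /// is stored on the worktree (and on the open buffer, if any) for that path.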
1831 pub fn update_diagnostics(
1832 &mut self,
1833 params: lsp::PublishDiagnosticsParams,
1834 disk_based_sources: &[&str],
1835 cx: &mut ModelContext<Self>,
1836 ) -> Result<()> {
1837 let abs_path = params
1838 .uri
1839 .to_file_path()
1840 .map_err(|_| anyhow!("URI is not a file"))?;
1841 let mut next_group_id = 0;
1842 let mut diagnostics = Vec::default();
1843 let mut primary_diagnostic_group_ids = HashMap::default();
1844 let mut sources_by_group_id = HashMap::default();
1845 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
1847 let source = diagnostic.source.as_ref();
1848 let code = diagnostic.code.as_ref().map(|code| match code {
1849 lsp::NumberOrString::Number(code) => code.to_string(),
1850 lsp::NumberOrString::String(code) => code.clone(),
1851 });
1852 let range = range_from_lsp(diagnostic.range);
1853 let is_supporting = diagnostic
1854 .related_information
1855 .as_ref()
1856 .map_or(false, |infos| {
1857 infos.iter().any(|info| {
1858 primary_diagnostic_group_ids.contains_key(&(
1859 source,
1860 code.clone(),
1861 range_from_lsp(info.location.range),
1862 ))
1863 })
1864 });
1865
1866 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1867 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1868 });
1869
1870 if is_supporting {
1871 supporting_diagnostics.insert(
1872 (source, code.clone(), range),
1873 (diagnostic.severity, is_unnecessary),
1874 );
1875 } else {
1876 let group_id = post_inc(&mut next_group_id);
1877 let is_disk_based = source.map_or(false, |source| {
1878 disk_based_sources.contains(&source.as_str())
1879 });
1880
1881 sources_by_group_id.insert(group_id, source);
1882 primary_diagnostic_group_ids
1883 .insert((source, code.clone(), range.clone()), group_id);
1884
1885 diagnostics.push(DiagnosticEntry {
1886 range,
1887 diagnostic: Diagnostic {
1888 code: code.clone(),
1889 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1890 message: diagnostic.message.clone(),
1891 group_id,
1892 is_primary: true,
1893 is_valid: true,
1894 is_disk_based,
1895 is_unnecessary,
1896 },
1897 });
1898 if let Some(infos) = &diagnostic.related_information {
1899 for info in infos {
1900 if info.location.uri == params.uri && !info.message.is_empty() {
1901 let range = range_from_lsp(info.location.range);
1902 diagnostics.push(DiagnosticEntry {
1903 range,
1904 diagnostic: Diagnostic {
1905 code: code.clone(),
1906 severity: DiagnosticSeverity::INFORMATION,
1907 message: info.message.clone(),
1908 group_id,
1909 is_primary: false,
1910 is_valid: true,
1911 is_disk_based,
1912 is_unnecessary: false,
1913 },
1914 });
1915 }
1916 }
1917 }
1918 }
1919 }
1920
1921 for entry in &mut diagnostics {
1922 let diagnostic = &mut entry.diagnostic;
1923 if !diagnostic.is_primary {
1924 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1925 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1926 source,
1927 diagnostic.code.clone(),
1928 entry.range.clone(),
1929 )) {
1930 if let Some(severity) = severity {
1931 diagnostic.severity = severity;
1932 }
1933 diagnostic.is_unnecessary = is_unnecessary;
1934 }
1935 }
1936 }
1937
1938 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1939 Ok(())
1940 }
1941
1942 pub fn update_diagnostic_entries(
1943 &mut self,
1944 abs_path: PathBuf,
1945 version: Option<i32>,
1946 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1947 cx: &mut ModelContext<Project>,
1948 ) -> Result<(), anyhow::Error> {
1949 let (worktree, relative_path) = self
1950 .find_local_worktree(&abs_path, cx)
1951 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1952 if !worktree.read(cx).is_visible() {
1953 return Ok(());
1954 }
1955
1956 let project_path = ProjectPath {
1957 worktree_id: worktree.read(cx).id(),
1958 path: relative_path.into(),
1959 };
1960 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
1961 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1962 }
1963
1964 worktree.update(cx, |worktree, cx| {
1965 worktree
1966 .as_local_mut()
1967 .ok_or_else(|| anyhow!("not a local worktree"))?
1968 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1969 })?;
1970 cx.emit(Event::DiagnosticsUpdated(project_path));
1971 Ok(())
1972 }
1973
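    /// Applies freshly received diagnostics to an open buffer. Entries are
    /// sorted, ranges of disk-based diagnostics are translated through the edits
    /// made since the last save, every range is clipped to the buffer snapshot
    /// matching the reported LSP `version`, and empty ranges are widened by one
    /// character so they remain visible.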
1974 fn update_buffer_diagnostics(
1975 &mut self,
1976 buffer: &ModelHandle<Buffer>,
1977 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1978 version: Option<i32>,
1979 cx: &mut ModelContext<Self>,
1980 ) -> Result<()> {
1981 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1982 Ordering::Equal
1983 .then_with(|| b.is_primary.cmp(&a.is_primary))
1984 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1985 .then_with(|| a.severity.cmp(&b.severity))
1986 .then_with(|| a.message.cmp(&b.message))
1987 }
1988
1989 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1990
1991 diagnostics.sort_unstable_by(|a, b| {
1992 Ordering::Equal
1993 .then_with(|| a.range.start.cmp(&b.range.start))
1994 .then_with(|| b.range.end.cmp(&a.range.end))
1995 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1996 });
1997
1998 let mut sanitized_diagnostics = Vec::new();
1999 let edits_since_save = Patch::new(
2000 snapshot
2001 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2002 .collect(),
2003 );
2004 for entry in diagnostics {
2005 let start;
2006 let end;
2007 if entry.diagnostic.is_disk_based {
2008 // Some diagnostics are based on files on disk instead of buffers'
2009 // current contents. Adjust these diagnostics' ranges to reflect
2010 // any unsaved edits.
2011 start = edits_since_save.old_to_new(entry.range.start);
2012 end = edits_since_save.old_to_new(entry.range.end);
2013 } else {
2014 start = entry.range.start;
2015 end = entry.range.end;
2016 }
2017
2018 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2019 ..snapshot.clip_point_utf16(end, Bias::Right);
2020
2021 // Expand empty ranges by one character
2022 if range.start == range.end {
2023 range.end.column += 1;
2024 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2025 if range.start == range.end && range.end.column > 0 {
2026 range.start.column -= 1;
2027 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2028 }
2029 }
2030
2031 sanitized_diagnostics.push(DiagnosticEntry {
2032 range,
2033 diagnostic: entry.diagnostic,
2034 });
2035 }
2036 drop(edits_since_save);
2037
2038 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2039 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2040 Ok(())
2041 }
2042
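    /// Reloads the given buffers from disk, discarding unsaved changes. Buffers
    /// backed by local files are reloaded directly; if this project is remote,
    /// their ids are sent to the host and the returned transaction is
    /// deserialized into the resulting [`ProjectTransaction`].
    ///
    /// A minimal call-site sketch (mirroring `handle_reload_buffers` below;
    /// `project` stands for a `&mut Project` inside an update closure):
    ///
    /// ```ignore
    /// let reload = project.reload_buffers(buffers, false, cx);
    /// ```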
2043 pub fn reload_buffers(
2044 &self,
2045 buffers: HashSet<ModelHandle<Buffer>>,
2046 push_to_history: bool,
2047 cx: &mut ModelContext<Self>,
2048 ) -> Task<Result<ProjectTransaction>> {
2049 let mut local_buffers = Vec::new();
2050 let mut remote_buffers = None;
2051 for buffer_handle in buffers {
2052 let buffer = buffer_handle.read(cx);
2053 if buffer.is_dirty() {
2054 if let Some(file) = File::from_dyn(buffer.file()) {
2055 if file.is_local() {
2056 local_buffers.push(buffer_handle);
2057 } else {
2058 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2059 }
2060 }
2061 }
2062 }
2063
2064 let remote_buffers = self.remote_id().zip(remote_buffers);
2065 let client = self.client.clone();
2066
2067 cx.spawn(|this, mut cx| async move {
2068 let mut project_transaction = ProjectTransaction::default();
2069
2070 if let Some((project_id, remote_buffers)) = remote_buffers {
2071 let response = client
2072 .request(proto::ReloadBuffers {
2073 project_id,
2074 buffer_ids: remote_buffers
2075 .iter()
2076 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2077 .collect(),
2078 })
2079 .await?
2080 .transaction
2081 .ok_or_else(|| anyhow!("missing transaction"))?;
2082 project_transaction = this
2083 .update(&mut cx, |this, cx| {
2084 this.deserialize_project_transaction(response, push_to_history, cx)
2085 })
2086 .await?;
2087 }
2088
2089 for buffer in local_buffers {
2090 let transaction = buffer
2091 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2092 .await?;
2093 buffer.update(&mut cx, |buffer, cx| {
2094 if let Some(transaction) = transaction {
2095 if !push_to_history {
2096 buffer.forget_transaction(transaction.id);
2097 }
2098 project_transaction.0.insert(cx.handle(), transaction);
2099 }
2100 });
2101 }
2102
2103 Ok(project_transaction)
2104 })
2105 }
2106
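    /// Formats the given buffers. Remote buffers are formatted by the host over
    /// RPC. For each local buffer, its language server is asked for
    /// whole-document formatting if supported, falling back to range formatting
    /// over the entire buffer; the returned edits are applied in a single
    /// transaction recorded in the returned [`ProjectTransaction`]. Buffers with
    /// no supporting language server are skipped.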
2107 pub fn format(
2108 &self,
2109 buffers: HashSet<ModelHandle<Buffer>>,
2110 push_to_history: bool,
2111 cx: &mut ModelContext<Project>,
2112 ) -> Task<Result<ProjectTransaction>> {
2113 let mut local_buffers = Vec::new();
2114 let mut remote_buffers = None;
2115 for buffer_handle in buffers {
2116 let buffer = buffer_handle.read(cx);
2117 if let Some(file) = File::from_dyn(buffer.file()) {
2118 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2119 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2120 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2121 }
2122 } else {
2123 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2124 }
2125 } else {
2126 return Task::ready(Ok(Default::default()));
2127 }
2128 }
2129
2130 let remote_buffers = self.remote_id().zip(remote_buffers);
2131 let client = self.client.clone();
2132
2133 cx.spawn(|this, mut cx| async move {
2134 let mut project_transaction = ProjectTransaction::default();
2135
2136 if let Some((project_id, remote_buffers)) = remote_buffers {
2137 let response = client
2138 .request(proto::FormatBuffers {
2139 project_id,
2140 buffer_ids: remote_buffers
2141 .iter()
2142 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2143 .collect(),
2144 })
2145 .await?
2146 .transaction
2147 .ok_or_else(|| anyhow!("missing transaction"))?;
2148 project_transaction = this
2149 .update(&mut cx, |this, cx| {
2150 this.deserialize_project_transaction(response, push_to_history, cx)
2151 })
2152 .await?;
2153 }
2154
2155 for (buffer, buffer_abs_path, language_server) in local_buffers {
2156 let text_document = lsp::TextDocumentIdentifier::new(
2157 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2158 );
2159 let capabilities = &language_server.capabilities();
2160 let tab_size = cx.update(|cx| {
2161 let language_name = buffer.read(cx).language().map(|language| language.name());
2162 cx.global::<Settings>().tab_size(language_name.as_deref())
2163 });
2164 let lsp_edits = if capabilities
2165 .document_formatting_provider
2166 .as_ref()
2167 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2168 {
2169 language_server
2170 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2171 text_document,
2172 options: lsp::FormattingOptions {
2173 tab_size,
2174 insert_spaces: true,
2175 insert_final_newline: Some(true),
2176 ..Default::default()
2177 },
2178 work_done_progress_params: Default::default(),
2179 })
2180 .await?
2181 } else if capabilities
2182 .document_range_formatting_provider
2183 .as_ref()
2184 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2185 {
2186 let buffer_start = lsp::Position::new(0, 0);
2187 let buffer_end =
2188 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2189 language_server
2190 .request::<lsp::request::RangeFormatting>(
2191 lsp::DocumentRangeFormattingParams {
2192 text_document,
2193 range: lsp::Range::new(buffer_start, buffer_end),
2194 options: lsp::FormattingOptions {
2195                                     tab_size,
2196 insert_spaces: true,
2197 insert_final_newline: Some(true),
2198 ..Default::default()
2199 },
2200 work_done_progress_params: Default::default(),
2201 },
2202 )
2203 .await?
2204 } else {
2205 continue;
2206 };
2207
2208 if let Some(lsp_edits) = lsp_edits {
2209 let edits = this
2210 .update(&mut cx, |this, cx| {
2211 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2212 })
2213 .await?;
2214 buffer.update(&mut cx, |buffer, cx| {
2215 buffer.finalize_last_transaction();
2216 buffer.start_transaction();
2217 for (range, text) in edits {
2218 buffer.edit([range], text, cx);
2219 }
2220 if buffer.end_transaction(cx).is_some() {
2221 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2222 if !push_to_history {
2223 buffer.forget_transaction(transaction.id);
2224 }
2225 project_transaction.0.insert(cx.handle(), transaction);
2226 }
2227 });
2228 }
2229 }
2230
2231 Ok(project_transaction)
2232 })
2233 }
2234
2235 pub fn definition<T: ToPointUtf16>(
2236 &self,
2237 buffer: &ModelHandle<Buffer>,
2238 position: T,
2239 cx: &mut ModelContext<Self>,
2240 ) -> Task<Result<Vec<Location>>> {
2241 let position = position.to_point_utf16(buffer.read(cx));
2242 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2243 }
2244
2245 pub fn references<T: ToPointUtf16>(
2246 &self,
2247 buffer: &ModelHandle<Buffer>,
2248 position: T,
2249 cx: &mut ModelContext<Self>,
2250 ) -> Task<Result<Vec<Location>>> {
2251 let position = position.to_point_utf16(buffer.read(cx));
2252 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2253 }
2254
2255 pub fn document_highlights<T: ToPointUtf16>(
2256 &self,
2257 buffer: &ModelHandle<Buffer>,
2258 position: T,
2259 cx: &mut ModelContext<Self>,
2260 ) -> Task<Result<Vec<DocumentHighlight>>> {
2261 let position = position.to_point_utf16(buffer.read(cx));
2262
2263 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2264 }
2265
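    /// Runs a workspace-symbol query. Locally, every running language server is
    /// queried and the responses are converted into [`Symbol`]s, resolving each
    /// location to a worktree-relative path where possible; for remote projects
    /// the query is forwarded to the host.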
2266 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2267 if self.is_local() {
2268 let mut requests = Vec::new();
2269 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2270 let worktree_id = *worktree_id;
2271 if let Some(worktree) = self
2272 .worktree_for_id(worktree_id, cx)
2273 .and_then(|worktree| worktree.read(cx).as_local())
2274 {
2275 let lsp_adapter = lsp_adapter.clone();
2276 let worktree_abs_path = worktree.abs_path().clone();
2277 requests.push(
2278 language_server
2279 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2280 query: query.to_string(),
2281 ..Default::default()
2282 })
2283 .log_err()
2284 .map(move |response| {
2285 (
2286 lsp_adapter,
2287 worktree_id,
2288 worktree_abs_path,
2289 response.unwrap_or_default(),
2290 )
2291 }),
2292 );
2293 }
2294 }
2295
2296 cx.spawn_weak(|this, cx| async move {
2297 let responses = futures::future::join_all(requests).await;
2298 let this = if let Some(this) = this.upgrade(&cx) {
2299 this
2300 } else {
2301 return Ok(Default::default());
2302 };
2303 this.read_with(&cx, |this, cx| {
2304 let mut symbols = Vec::new();
2305 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2306 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2307 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2308 let mut worktree_id = source_worktree_id;
2309 let path;
2310 if let Some((worktree, rel_path)) =
2311 this.find_local_worktree(&abs_path, cx)
2312 {
2313 worktree_id = worktree.read(cx).id();
2314 path = rel_path;
2315 } else {
2316 path = relativize_path(&worktree_abs_path, &abs_path);
2317 }
2318
2319 let label = this
2320 .languages
2321 .select_language(&path)
2322 .and_then(|language| {
2323 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2324 })
2325 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2326 let signature = this.symbol_signature(worktree_id, &path);
2327
2328 Some(Symbol {
2329 source_worktree_id,
2330 worktree_id,
2331 language_server_name: adapter.name(),
2332 name: lsp_symbol.name,
2333 kind: lsp_symbol.kind,
2334 label,
2335 path,
2336 range: range_from_lsp(lsp_symbol.location.range),
2337 signature,
2338 })
2339 }));
2340 }
2341 Ok(symbols)
2342 })
2343 })
2344 } else if let Some(project_id) = self.remote_id() {
2345 let request = self.client.request(proto::GetProjectSymbols {
2346 project_id,
2347 query: query.to_string(),
2348 });
2349 cx.spawn_weak(|this, cx| async move {
2350 let response = request.await?;
2351 let mut symbols = Vec::new();
2352 if let Some(this) = this.upgrade(&cx) {
2353 this.read_with(&cx, |this, _| {
2354 symbols.extend(
2355 response
2356 .symbols
2357 .into_iter()
2358 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2359 );
2360 })
2361 }
2362 Ok(symbols)
2363 })
2364 } else {
2365 Task::ready(Ok(Default::default()))
2366 }
2367 }
2368
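    /// Opens the buffer containing `symbol`, using the language server that
    /// reported it when the project is local, or asking the host over RPC when
    /// it is remote.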
2369 pub fn open_buffer_for_symbol(
2370 &mut self,
2371 symbol: &Symbol,
2372 cx: &mut ModelContext<Self>,
2373 ) -> Task<Result<ModelHandle<Buffer>>> {
2374 if self.is_local() {
2375 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2376 symbol.source_worktree_id,
2377 symbol.language_server_name.clone(),
2378 )) {
2379 server.clone()
2380 } else {
2381 return Task::ready(Err(anyhow!(
2382 "language server for worktree and language not found"
2383 )));
2384 };
2385
2386 let worktree_abs_path = if let Some(worktree_abs_path) = self
2387 .worktree_for_id(symbol.worktree_id, cx)
2388 .and_then(|worktree| worktree.read(cx).as_local())
2389 .map(|local_worktree| local_worktree.abs_path())
2390 {
2391 worktree_abs_path
2392 } else {
2393 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2394 };
2395 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2396 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2397 uri
2398 } else {
2399 return Task::ready(Err(anyhow!("invalid symbol path")));
2400 };
2401
2402 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2403 } else if let Some(project_id) = self.remote_id() {
2404 let request = self.client.request(proto::OpenBufferForSymbol {
2405 project_id,
2406 symbol: Some(serialize_symbol(symbol)),
2407 });
2408 cx.spawn(|this, mut cx| async move {
2409 let response = request.await?;
2410 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2411 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2412 .await
2413 })
2414 } else {
2415 Task::ready(Err(anyhow!("project does not have a remote id")))
2416 }
2417 }
2418
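    /// Requests completions at `position`. For local buffers the language server
    /// is queried directly and each completion's text edit is converted into an
    /// anchored range, skipping completions whose ranges no longer fit the
    /// buffer. For remote projects the request is proxied to the host and the
    /// results are deserialized once the buffer has caught up to the reported
    /// version.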
2419 pub fn completions<T: ToPointUtf16>(
2420 &self,
2421 source_buffer_handle: &ModelHandle<Buffer>,
2422 position: T,
2423 cx: &mut ModelContext<Self>,
2424 ) -> Task<Result<Vec<Completion>>> {
2425 let source_buffer_handle = source_buffer_handle.clone();
2426 let source_buffer = source_buffer_handle.read(cx);
2427 let buffer_id = source_buffer.remote_id();
2428 let language = source_buffer.language().cloned();
2429 let worktree;
2430 let buffer_abs_path;
2431 if let Some(file) = File::from_dyn(source_buffer.file()) {
2432 worktree = file.worktree.clone();
2433 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2434 } else {
2435 return Task::ready(Ok(Default::default()));
2436 };
2437
2438 let position = position.to_point_utf16(source_buffer);
2439 let anchor = source_buffer.anchor_after(position);
2440
2441 if worktree.read(cx).as_local().is_some() {
2442 let buffer_abs_path = buffer_abs_path.unwrap();
2443 let (_, lang_server) =
2444 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2445 server.clone()
2446 } else {
2447 return Task::ready(Ok(Default::default()));
2448 };
2449
2450 cx.spawn(|_, cx| async move {
2451 let completions = lang_server
2452 .request::<lsp::request::Completion>(lsp::CompletionParams {
2453 text_document_position: lsp::TextDocumentPositionParams::new(
2454 lsp::TextDocumentIdentifier::new(
2455 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2456 ),
2457 point_to_lsp(position),
2458 ),
2459 context: Default::default(),
2460 work_done_progress_params: Default::default(),
2461 partial_result_params: Default::default(),
2462 })
2463 .await
2464 .context("lsp completion request failed")?;
2465
2466 let completions = if let Some(completions) = completions {
2467 match completions {
2468 lsp::CompletionResponse::Array(completions) => completions,
2469 lsp::CompletionResponse::List(list) => list.items,
2470 }
2471 } else {
2472 Default::default()
2473 };
2474
2475 source_buffer_handle.read_with(&cx, |this, _| {
2476 Ok(completions
2477 .into_iter()
2478 .filter_map(|lsp_completion| {
2479 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2480 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2481 (range_from_lsp(edit.range), edit.new_text.clone())
2482 }
2483 None => {
2484 let clipped_position =
2485 this.clip_point_utf16(position, Bias::Left);
2486 if position != clipped_position {
2487 log::info!("completion out of expected range");
2488 return None;
2489 }
2490 (
2491 this.common_prefix_at(
2492 clipped_position,
2493 &lsp_completion.label,
2494 ),
2495 lsp_completion.label.clone(),
2496 )
2497 }
2498 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2499 log::info!("unsupported insert/replace completion");
2500 return None;
2501 }
2502 };
2503
2504 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
2505 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
2506 if clipped_start == old_range.start && clipped_end == old_range.end {
2507 Some(Completion {
2508 old_range: this.anchor_before(old_range.start)
2509 ..this.anchor_after(old_range.end),
2510 new_text,
2511 label: language
2512 .as_ref()
2513 .and_then(|l| l.label_for_completion(&lsp_completion))
2514 .unwrap_or_else(|| {
2515 CodeLabel::plain(
2516 lsp_completion.label.clone(),
2517 lsp_completion.filter_text.as_deref(),
2518 )
2519 }),
2520 lsp_completion,
2521 })
2522 } else {
2523 log::info!("completion out of expected range");
2524 None
2525 }
2526 })
2527 .collect())
2528 })
2529 })
2530 } else if let Some(project_id) = self.remote_id() {
2531 let rpc = self.client.clone();
2532 let message = proto::GetCompletions {
2533 project_id,
2534 buffer_id,
2535 position: Some(language::proto::serialize_anchor(&anchor)),
2536 version: serialize_version(&source_buffer.version()),
2537 };
2538 cx.spawn_weak(|_, mut cx| async move {
2539 let response = rpc.request(message).await?;
2540
2541 source_buffer_handle
2542 .update(&mut cx, |buffer, _| {
2543 buffer.wait_for_version(deserialize_version(response.version))
2544 })
2545 .await;
2546
2547 response
2548 .completions
2549 .into_iter()
2550 .map(|completion| {
2551 language::proto::deserialize_completion(completion, language.as_ref())
2552 })
2553 .collect()
2554 })
2555 } else {
2556 Task::ready(Ok(Default::default()))
2557 }
2558 }
2559
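    /// Resolves `completion` with the language server and applies any
    /// `additional_text_edits` it returns in a single transaction, optionally
    /// keeping that transaction in the buffer's undo history. Remote projects
    /// forward the request to the host and wait for the returned edits to reach
    /// the buffer.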
2560 pub fn apply_additional_edits_for_completion(
2561 &self,
2562 buffer_handle: ModelHandle<Buffer>,
2563 completion: Completion,
2564 push_to_history: bool,
2565 cx: &mut ModelContext<Self>,
2566 ) -> Task<Result<Option<Transaction>>> {
2567 let buffer = buffer_handle.read(cx);
2568 let buffer_id = buffer.remote_id();
2569
2570 if self.is_local() {
2571 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2572 {
2573 server.clone()
2574 } else {
2575 return Task::ready(Ok(Default::default()));
2576 };
2577
2578 cx.spawn(|this, mut cx| async move {
2579 let resolved_completion = lang_server
2580 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2581 .await?;
2582 if let Some(edits) = resolved_completion.additional_text_edits {
2583 let edits = this
2584 .update(&mut cx, |this, cx| {
2585 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2586 })
2587 .await?;
2588 buffer_handle.update(&mut cx, |buffer, cx| {
2589 buffer.finalize_last_transaction();
2590 buffer.start_transaction();
2591 for (range, text) in edits {
2592 buffer.edit([range], text, cx);
2593 }
2594 let transaction = if buffer.end_transaction(cx).is_some() {
2595 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2596 if !push_to_history {
2597 buffer.forget_transaction(transaction.id);
2598 }
2599 Some(transaction)
2600 } else {
2601 None
2602 };
2603 Ok(transaction)
2604 })
2605 } else {
2606 Ok(None)
2607 }
2608 })
2609 } else if let Some(project_id) = self.remote_id() {
2610 let client = self.client.clone();
2611 cx.spawn(|_, mut cx| async move {
2612 let response = client
2613 .request(proto::ApplyCompletionAdditionalEdits {
2614 project_id,
2615 buffer_id,
2616 completion: Some(language::proto::serialize_completion(&completion)),
2617 })
2618 .await?;
2619
2620 if let Some(transaction) = response.transaction {
2621 let transaction = language::proto::deserialize_transaction(transaction)?;
2622 buffer_handle
2623 .update(&mut cx, |buffer, _| {
2624 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2625 })
2626 .await;
2627 if push_to_history {
2628 buffer_handle.update(&mut cx, |buffer, _| {
2629 buffer.push_transaction(transaction.clone(), Instant::now());
2630 });
2631 }
2632 Ok(Some(transaction))
2633 } else {
2634 Ok(None)
2635 }
2636 })
2637 } else {
2638 Task::ready(Err(anyhow!("project does not have a remote id")))
2639 }
2640 }
2641
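    /// Requests code actions for `range`, attaching the diagnostics that overlap
    /// it as context and restricting the results to quickfix, refactor,
    /// refactor-extract, and source actions. Returns an empty list when the
    /// buffer's language server does not advertise a code-action provider; for
    /// remote projects the request is forwarded to the host.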
2642 pub fn code_actions<T: Clone + ToOffset>(
2643 &self,
2644 buffer_handle: &ModelHandle<Buffer>,
2645 range: Range<T>,
2646 cx: &mut ModelContext<Self>,
2647 ) -> Task<Result<Vec<CodeAction>>> {
2648 let buffer_handle = buffer_handle.clone();
2649 let buffer = buffer_handle.read(cx);
2650 let snapshot = buffer.snapshot();
2651 let relevant_diagnostics = snapshot
2652 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2653 .map(|entry| entry.to_lsp_diagnostic_stub())
2654 .collect();
2655 let buffer_id = buffer.remote_id();
2656 let worktree;
2657 let buffer_abs_path;
2658 if let Some(file) = File::from_dyn(buffer.file()) {
2659 worktree = file.worktree.clone();
2660 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2661 } else {
2662 return Task::ready(Ok(Default::default()));
2663 };
2664 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2665
2666 if worktree.read(cx).as_local().is_some() {
2667 let buffer_abs_path = buffer_abs_path.unwrap();
2668 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2669 {
2670 server.clone()
2671 } else {
2672 return Task::ready(Ok(Default::default()));
2673 };
2674
2675 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2676 cx.foreground().spawn(async move {
2677                 if lang_server.capabilities().code_action_provider.is_none() {
2678 return Ok(Default::default());
2679 }
2680
2681 Ok(lang_server
2682 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2683 text_document: lsp::TextDocumentIdentifier::new(
2684 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2685 ),
2686 range: lsp_range,
2687 work_done_progress_params: Default::default(),
2688 partial_result_params: Default::default(),
2689 context: lsp::CodeActionContext {
2690 diagnostics: relevant_diagnostics,
2691 only: Some(vec![
2692 lsp::CodeActionKind::QUICKFIX,
2693 lsp::CodeActionKind::REFACTOR,
2694 lsp::CodeActionKind::REFACTOR_EXTRACT,
2695 lsp::CodeActionKind::SOURCE,
2696 ]),
2697 },
2698 })
2699 .await?
2700 .unwrap_or_default()
2701 .into_iter()
2702 .filter_map(|entry| {
2703 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2704 Some(CodeAction {
2705 range: range.clone(),
2706 lsp_action,
2707 })
2708 } else {
2709 None
2710 }
2711 })
2712 .collect())
2713 })
2714 } else if let Some(project_id) = self.remote_id() {
2715 let rpc = self.client.clone();
2716 let version = buffer.version();
2717 cx.spawn_weak(|_, mut cx| async move {
2718 let response = rpc
2719 .request(proto::GetCodeActions {
2720 project_id,
2721 buffer_id,
2722 start: Some(language::proto::serialize_anchor(&range.start)),
2723 end: Some(language::proto::serialize_anchor(&range.end)),
2724 version: serialize_version(&version),
2725 })
2726 .await?;
2727
2728 buffer_handle
2729 .update(&mut cx, |buffer, _| {
2730 buffer.wait_for_version(deserialize_version(response.version))
2731 })
2732 .await;
2733
2734 response
2735 .actions
2736 .into_iter()
2737 .map(language::proto::deserialize_code_action)
2738 .collect()
2739 })
2740 } else {
2741 Task::ready(Ok(Default::default()))
2742 }
2743 }
2744
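    /// Applies `action`. Actions carrying lazily-resolved `data` are re-resolved
    /// with the language server; otherwise the action list is re-requested and
    /// matched by title. The action's workspace edit is then applied, or its
    /// command is executed and whatever workspace edits the server sent back are
    /// collected into the returned [`ProjectTransaction`].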
2745 pub fn apply_code_action(
2746 &self,
2747 buffer_handle: ModelHandle<Buffer>,
2748 mut action: CodeAction,
2749 push_to_history: bool,
2750 cx: &mut ModelContext<Self>,
2751 ) -> Task<Result<ProjectTransaction>> {
2752 if self.is_local() {
2753 let buffer = buffer_handle.read(cx);
2754 let (lsp_adapter, lang_server) =
2755 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2756 server.clone()
2757 } else {
2758 return Task::ready(Ok(Default::default()));
2759 };
2760 let range = action.range.to_point_utf16(buffer);
2761
2762 cx.spawn(|this, mut cx| async move {
2763 if let Some(lsp_range) = action
2764 .lsp_action
2765 .data
2766 .as_mut()
2767 .and_then(|d| d.get_mut("codeActionParams"))
2768 .and_then(|d| d.get_mut("range"))
2769 {
2770 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2771 action.lsp_action = lang_server
2772 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2773 .await?;
2774 } else {
2775 let actions = this
2776 .update(&mut cx, |this, cx| {
2777 this.code_actions(&buffer_handle, action.range, cx)
2778 })
2779 .await?;
2780 action.lsp_action = actions
2781 .into_iter()
2782 .find(|a| a.lsp_action.title == action.lsp_action.title)
2783 .ok_or_else(|| anyhow!("code action is outdated"))?
2784 .lsp_action;
2785 }
2786
2787 if let Some(edit) = action.lsp_action.edit {
2788 Self::deserialize_workspace_edit(
2789 this,
2790 edit,
2791 push_to_history,
2792 lsp_adapter,
2793 lang_server,
2794 &mut cx,
2795 )
2796 .await
2797 } else if let Some(command) = action.lsp_action.command {
2798 this.update(&mut cx, |this, _| {
2799 this.last_workspace_edits_by_language_server
2800 .remove(&lang_server.server_id());
2801 });
2802 lang_server
2803 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2804 command: command.command,
2805 arguments: command.arguments.unwrap_or_default(),
2806 ..Default::default()
2807 })
2808 .await?;
2809 Ok(this.update(&mut cx, |this, _| {
2810 this.last_workspace_edits_by_language_server
2811 .remove(&lang_server.server_id())
2812 .unwrap_or_default()
2813 }))
2814 } else {
2815 Ok(ProjectTransaction::default())
2816 }
2817 })
2818 } else if let Some(project_id) = self.remote_id() {
2819 let client = self.client.clone();
2820 let request = proto::ApplyCodeAction {
2821 project_id,
2822 buffer_id: buffer_handle.read(cx).remote_id(),
2823 action: Some(language::proto::serialize_code_action(&action)),
2824 };
2825 cx.spawn(|this, mut cx| async move {
2826 let response = client
2827 .request(request)
2828 .await?
2829 .transaction
2830 .ok_or_else(|| anyhow!("missing transaction"))?;
2831 this.update(&mut cx, |this, cx| {
2832 this.deserialize_project_transaction(response, push_to_history, cx)
2833 })
2834 .await
2835 })
2836 } else {
2837 Task::ready(Err(anyhow!("project does not have a remote id")))
2838 }
2839 }
2840
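    /// Applies an [`lsp::WorkspaceEdit`] to the project: resource operations
    /// create, rename, or delete files and directories through the filesystem,
    /// while text document edits are applied to buffers opened via the
    /// originating language server, each as its own transaction.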
2841 async fn deserialize_workspace_edit(
2842 this: ModelHandle<Self>,
2843 edit: lsp::WorkspaceEdit,
2844 push_to_history: bool,
2845 lsp_adapter: Arc<dyn LspAdapter>,
2846 language_server: Arc<LanguageServer>,
2847 cx: &mut AsyncAppContext,
2848 ) -> Result<ProjectTransaction> {
2849 let fs = this.read_with(cx, |this, _| this.fs.clone());
2850 let mut operations = Vec::new();
2851 if let Some(document_changes) = edit.document_changes {
2852 match document_changes {
2853 lsp::DocumentChanges::Edits(edits) => {
2854 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2855 }
2856 lsp::DocumentChanges::Operations(ops) => operations = ops,
2857 }
2858 } else if let Some(changes) = edit.changes {
2859 operations.extend(changes.into_iter().map(|(uri, edits)| {
2860 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2861 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2862 uri,
2863 version: None,
2864 },
2865 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2866 })
2867 }));
2868 }
2869
2870 let mut project_transaction = ProjectTransaction::default();
2871 for operation in operations {
2872 match operation {
2873 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2874 let abs_path = op
2875 .uri
2876 .to_file_path()
2877 .map_err(|_| anyhow!("can't convert URI to path"))?;
2878
2879 if let Some(parent_path) = abs_path.parent() {
2880 fs.create_dir(parent_path).await?;
2881 }
2882 if abs_path.ends_with("/") {
2883 fs.create_dir(&abs_path).await?;
2884 } else {
2885 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2886 .await?;
2887 }
2888 }
2889 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2890 let source_abs_path = op
2891 .old_uri
2892 .to_file_path()
2893 .map_err(|_| anyhow!("can't convert URI to path"))?;
2894 let target_abs_path = op
2895 .new_uri
2896 .to_file_path()
2897 .map_err(|_| anyhow!("can't convert URI to path"))?;
2898 fs.rename(
2899 &source_abs_path,
2900 &target_abs_path,
2901 op.options.map(Into::into).unwrap_or_default(),
2902 )
2903 .await?;
2904 }
2905 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2906 let abs_path = op
2907 .uri
2908 .to_file_path()
2909 .map_err(|_| anyhow!("can't convert URI to path"))?;
2910 let options = op.options.map(Into::into).unwrap_or_default();
2911 if abs_path.ends_with("/") {
2912 fs.remove_dir(&abs_path, options).await?;
2913 } else {
2914 fs.remove_file(&abs_path, options).await?;
2915 }
2916 }
2917 lsp::DocumentChangeOperation::Edit(op) => {
2918 let buffer_to_edit = this
2919 .update(cx, |this, cx| {
2920 this.open_local_buffer_via_lsp(
2921 op.text_document.uri,
2922 lsp_adapter.clone(),
2923 language_server.clone(),
2924 cx,
2925 )
2926 })
2927 .await?;
2928
2929 let edits = this
2930 .update(cx, |this, cx| {
2931 let edits = op.edits.into_iter().map(|edit| match edit {
2932 lsp::OneOf::Left(edit) => edit,
2933 lsp::OneOf::Right(edit) => edit.text_edit,
2934 });
2935 this.edits_from_lsp(
2936 &buffer_to_edit,
2937 edits,
2938 op.text_document.version,
2939 cx,
2940 )
2941 })
2942 .await?;
2943
2944 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2945 buffer.finalize_last_transaction();
2946 buffer.start_transaction();
2947 for (range, text) in edits {
2948 buffer.edit([range], text, cx);
2949 }
2950 let transaction = if buffer.end_transaction(cx).is_some() {
2951 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2952 if !push_to_history {
2953 buffer.forget_transaction(transaction.id);
2954 }
2955 Some(transaction)
2956 } else {
2957 None
2958 };
2959
2960 transaction
2961 });
2962 if let Some(transaction) = transaction {
2963 project_transaction.0.insert(buffer_to_edit, transaction);
2964 }
2965 }
2966 }
2967 }
2968
2969 Ok(project_transaction)
2970 }
2971
2972 pub fn prepare_rename<T: ToPointUtf16>(
2973 &self,
2974 buffer: ModelHandle<Buffer>,
2975 position: T,
2976 cx: &mut ModelContext<Self>,
2977 ) -> Task<Result<Option<Range<Anchor>>>> {
2978 let position = position.to_point_utf16(buffer.read(cx));
2979 self.request_lsp(buffer, PrepareRename { position }, cx)
2980 }
2981
2982 pub fn perform_rename<T: ToPointUtf16>(
2983 &self,
2984 buffer: ModelHandle<Buffer>,
2985 position: T,
2986 new_name: String,
2987 push_to_history: bool,
2988 cx: &mut ModelContext<Self>,
2989 ) -> Task<Result<ProjectTransaction>> {
2990 let position = position.to_point_utf16(buffer.read(cx));
2991 self.request_lsp(
2992 buffer,
2993 PerformRename {
2994 position,
2995 new_name,
2996 push_to_history,
2997 },
2998 cx,
2999 )
3000 }
3001
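    /// Searches the project for `query`. Locally, visible worktrees are split
    /// across background workers that scan files on disk for candidate matches,
    /// candidate paths are opened as buffers, and each buffer snapshot is
    /// searched to produce anchored ranges grouped by buffer. For remote
    /// projects the query is sent to the host and the returned locations are
    /// deserialized into local buffers.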
3002 pub fn search(
3003 &self,
3004 query: SearchQuery,
3005 cx: &mut ModelContext<Self>,
3006 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3007 if self.is_local() {
3008 let snapshots = self
3009 .visible_worktrees(cx)
3010 .filter_map(|tree| {
3011 let tree = tree.read(cx).as_local()?;
3012 Some(tree.snapshot())
3013 })
3014 .collect::<Vec<_>>();
3015
3016 let background = cx.background().clone();
3017 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3018 if path_count == 0 {
3019 return Task::ready(Ok(Default::default()));
3020 }
3021 let workers = background.num_cpus().min(path_count);
3022 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3023 cx.background()
3024 .spawn({
3025 let fs = self.fs.clone();
3026 let background = cx.background().clone();
3027 let query = query.clone();
3028 async move {
3029 let fs = &fs;
3030 let query = &query;
3031 let matching_paths_tx = &matching_paths_tx;
3032 let paths_per_worker = (path_count + workers - 1) / workers;
3033 let snapshots = &snapshots;
3034 background
3035 .scoped(|scope| {
3036 for worker_ix in 0..workers {
3037 let worker_start_ix = worker_ix * paths_per_worker;
3038 let worker_end_ix = worker_start_ix + paths_per_worker;
3039 scope.spawn(async move {
3040 let mut snapshot_start_ix = 0;
3041 let mut abs_path = PathBuf::new();
3042 for snapshot in snapshots {
3043 let snapshot_end_ix =
3044 snapshot_start_ix + snapshot.visible_file_count();
3045 if worker_end_ix <= snapshot_start_ix {
3046 break;
3047 } else if worker_start_ix > snapshot_end_ix {
3048 snapshot_start_ix = snapshot_end_ix;
3049 continue;
3050 } else {
3051 let start_in_snapshot = worker_start_ix
3052 .saturating_sub(snapshot_start_ix);
3053 let end_in_snapshot =
3054 cmp::min(worker_end_ix, snapshot_end_ix)
3055 - snapshot_start_ix;
3056
3057 for entry in snapshot
3058 .files(false, start_in_snapshot)
3059 .take(end_in_snapshot - start_in_snapshot)
3060 {
3061 if matching_paths_tx.is_closed() {
3062 break;
3063 }
3064
3065 abs_path.clear();
3066 abs_path.push(&snapshot.abs_path());
3067 abs_path.push(&entry.path);
3068 let matches = if let Some(file) =
3069 fs.open_sync(&abs_path).await.log_err()
3070 {
3071 query.detect(file).unwrap_or(false)
3072 } else {
3073 false
3074 };
3075
3076 if matches {
3077 let project_path =
3078 (snapshot.id(), entry.path.clone());
3079 if matching_paths_tx
3080 .send(project_path)
3081 .await
3082 .is_err()
3083 {
3084 break;
3085 }
3086 }
3087 }
3088
3089 snapshot_start_ix = snapshot_end_ix;
3090 }
3091 }
3092 });
3093 }
3094 })
3095 .await;
3096 }
3097 })
3098 .detach();
3099
3100 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3101 let open_buffers = self
3102 .opened_buffers
3103 .values()
3104 .filter_map(|b| b.upgrade(cx))
3105 .collect::<HashSet<_>>();
3106 cx.spawn(|this, cx| async move {
3107 for buffer in &open_buffers {
3108 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3109 buffers_tx.send((buffer.clone(), snapshot)).await?;
3110 }
3111
3112 let open_buffers = Rc::new(RefCell::new(open_buffers));
3113 while let Some(project_path) = matching_paths_rx.next().await {
3114 if buffers_tx.is_closed() {
3115 break;
3116 }
3117
3118 let this = this.clone();
3119 let open_buffers = open_buffers.clone();
3120 let buffers_tx = buffers_tx.clone();
3121 cx.spawn(|mut cx| async move {
3122 if let Some(buffer) = this
3123 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3124 .await
3125 .log_err()
3126 {
3127 if open_buffers.borrow_mut().insert(buffer.clone()) {
3128 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3129 buffers_tx.send((buffer, snapshot)).await?;
3130 }
3131 }
3132
3133 Ok::<_, anyhow::Error>(())
3134 })
3135 .detach();
3136 }
3137
3138 Ok::<_, anyhow::Error>(())
3139 })
3140 .detach_and_log_err(cx);
3141
3142 let background = cx.background().clone();
3143 cx.background().spawn(async move {
3144 let query = &query;
3145 let mut matched_buffers = Vec::new();
3146 for _ in 0..workers {
3147 matched_buffers.push(HashMap::default());
3148 }
3149 background
3150 .scoped(|scope| {
3151 for worker_matched_buffers in matched_buffers.iter_mut() {
3152 let mut buffers_rx = buffers_rx.clone();
3153 scope.spawn(async move {
3154 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3155 let buffer_matches = query
3156 .search(snapshot.as_rope())
3157 .await
3158 .iter()
3159 .map(|range| {
3160 snapshot.anchor_before(range.start)
3161 ..snapshot.anchor_after(range.end)
3162 })
3163 .collect::<Vec<_>>();
3164 if !buffer_matches.is_empty() {
3165 worker_matched_buffers
3166 .insert(buffer.clone(), buffer_matches);
3167 }
3168 }
3169 });
3170 }
3171 })
3172 .await;
3173 Ok(matched_buffers.into_iter().flatten().collect())
3174 })
3175 } else if let Some(project_id) = self.remote_id() {
3176 let request = self.client.request(query.to_proto(project_id));
3177 cx.spawn(|this, mut cx| async move {
3178 let response = request.await?;
3179 let mut result = HashMap::default();
3180 for location in response.locations {
3181 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3182 let target_buffer = this
3183 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3184 .await?;
3185 let start = location
3186 .start
3187 .and_then(deserialize_anchor)
3188 .ok_or_else(|| anyhow!("missing target start"))?;
3189 let end = location
3190 .end
3191 .and_then(deserialize_anchor)
3192 .ok_or_else(|| anyhow!("missing target end"))?;
3193 result
3194 .entry(target_buffer)
3195 .or_insert(Vec::new())
3196 .push(start..end)
3197 }
3198 Ok(result)
3199 })
3200 } else {
3201 Task::ready(Ok(Default::default()))
3202 }
3203 }
3204
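    /// Dispatches a typed LSP request for `buffer_handle`. Locally, the buffer's
    /// language server is queried directly once its capabilities have been
    /// checked; for remote projects the request is serialized and sent to the
    /// host. Higher-level methods wrap concrete commands, e.g. `definition`
    /// above:
    ///
    /// ```ignore
    /// self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
    /// ```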
3205 fn request_lsp<R: LspCommand>(
3206 &self,
3207 buffer_handle: ModelHandle<Buffer>,
3208 request: R,
3209 cx: &mut ModelContext<Self>,
3210 ) -> Task<Result<R::Response>>
3211 where
3212 <R::LspRequest as lsp::request::Request>::Result: Send,
3213 {
3214 let buffer = buffer_handle.read(cx);
3215 if self.is_local() {
3216 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3217 if let Some((file, (_, language_server))) =
3218 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3219 {
3220 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3221 return cx.spawn(|this, cx| async move {
3222 if !request.check_capabilities(&language_server.capabilities()) {
3223 return Ok(Default::default());
3224 }
3225
3226 let response = language_server
3227 .request::<R::LspRequest>(lsp_params)
3228 .await
3229 .context("lsp request failed")?;
3230 request
3231 .response_from_lsp(response, this, buffer_handle, cx)
3232 .await
3233 });
3234 }
3235 } else if let Some(project_id) = self.remote_id() {
3236 let rpc = self.client.clone();
3237 let message = request.to_proto(project_id, buffer);
3238 return cx.spawn(|this, cx| async move {
3239 let response = rpc.request(message).await?;
3240 request
3241 .response_from_proto(response, this, buffer_handle, cx)
3242 .await
3243 });
3244 }
3245 Task::ready(Ok(Default::default()))
3246 }
3247
3248 pub fn find_or_create_local_worktree(
3249 &mut self,
3250 abs_path: impl AsRef<Path>,
3251 visible: bool,
3252 cx: &mut ModelContext<Self>,
3253 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3254 let abs_path = abs_path.as_ref();
3255 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3256 Task::ready(Ok((tree.clone(), relative_path.into())))
3257 } else {
3258 let worktree = self.create_local_worktree(abs_path, visible, cx);
3259 cx.foreground()
3260 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3261 }
3262 }
3263
3264 pub fn find_local_worktree(
3265 &self,
3266 abs_path: &Path,
3267 cx: &AppContext,
3268 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3269 for tree in self.worktrees(cx) {
3270 if let Some(relative_path) = tree
3271 .read(cx)
3272 .as_local()
3273 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3274 {
3275 return Some((tree.clone(), relative_path.into()));
3276 }
3277 }
3278 None
3279 }
3280
3281 pub fn is_shared(&self) -> bool {
3282 match &self.client_state {
3283 ProjectClientState::Local { is_shared, .. } => *is_shared,
3284 ProjectClientState::Remote { .. } => false,
3285 }
3286 }
3287
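    /// Starts loading a local worktree rooted at `abs_path`, deduplicating
    /// concurrent requests for the same path through `loading_local_worktrees`.
    /// Once loaded, the worktree is added to the project and, if the project has
    /// a remote id, registered with or shared to the server depending on whether
    /// the project is currently shared.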
3288 fn create_local_worktree(
3289 &mut self,
3290 abs_path: impl AsRef<Path>,
3291 visible: bool,
3292 cx: &mut ModelContext<Self>,
3293 ) -> Task<Result<ModelHandle<Worktree>>> {
3294 let fs = self.fs.clone();
3295 let client = self.client.clone();
3296 let next_entry_id = self.next_entry_id.clone();
3297 let path: Arc<Path> = abs_path.as_ref().into();
3298 let task = self
3299 .loading_local_worktrees
3300 .entry(path.clone())
3301 .or_insert_with(|| {
3302 cx.spawn(|project, mut cx| {
3303 async move {
3304 let worktree = Worktree::local(
3305 client.clone(),
3306 path.clone(),
3307 visible,
3308 fs,
3309 next_entry_id,
3310 &mut cx,
3311 )
3312 .await;
3313 project.update(&mut cx, |project, _| {
3314 project.loading_local_worktrees.remove(&path);
3315 });
3316 let worktree = worktree?;
3317
3318 let (remote_project_id, is_shared) =
3319 project.update(&mut cx, |project, cx| {
3320 project.add_worktree(&worktree, cx);
3321 (project.remote_id(), project.is_shared())
3322 });
3323
3324 if let Some(project_id) = remote_project_id {
3325 if is_shared {
3326 worktree
3327 .update(&mut cx, |worktree, cx| {
3328 worktree.as_local_mut().unwrap().share(project_id, cx)
3329 })
3330 .await?;
3331 } else {
3332 worktree
3333 .update(&mut cx, |worktree, cx| {
3334 worktree.as_local_mut().unwrap().register(project_id, cx)
3335 })
3336 .await?;
3337 }
3338 }
3339
3340 Ok(worktree)
3341 }
3342                     .map_err(Arc::new)
3343 })
3344 .shared()
3345 })
3346 .clone();
3347 cx.foreground().spawn(async move {
3348 match task.await {
3349 Ok(worktree) => Ok(worktree),
3350 Err(err) => Err(anyhow!("{}", err)),
3351 }
3352 })
3353 }
3354
3355 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3356 self.worktrees.retain(|worktree| {
3357 worktree
3358 .upgrade(cx)
3359 .map_or(false, |w| w.read(cx).id() != id)
3360 });
3361 cx.notify();
3362 }
3363
3364 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3365 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3366 if worktree.read(cx).is_local() {
3367 cx.subscribe(&worktree, |this, worktree, _, cx| {
3368 this.update_local_worktree_buffers(worktree, cx);
3369 })
3370 .detach();
3371 }
3372
3373 let push_strong_handle = {
3374 let worktree = worktree.read(cx);
3375 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3376 };
3377 if push_strong_handle {
3378 self.worktrees
3379 .push(WorktreeHandle::Strong(worktree.clone()));
3380 } else {
3381 cx.observe_release(&worktree, |this, _, cx| {
3382 this.worktrees
3383 .retain(|worktree| worktree.upgrade(cx).is_some());
3384 cx.notify();
3385 })
3386 .detach();
3387 self.worktrees
3388 .push(WorktreeHandle::Weak(worktree.downgrade()));
3389 }
3390 cx.notify();
3391 }
3392
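    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's `File` is refreshed from the snapshot, renamed buffers are
    /// unregistered and re-registered with their language servers, and entries
    /// for buffers that have already been dropped are removed.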
3393 fn update_local_worktree_buffers(
3394 &mut self,
3395 worktree_handle: ModelHandle<Worktree>,
3396 cx: &mut ModelContext<Self>,
3397 ) {
3398 let snapshot = worktree_handle.read(cx).snapshot();
3399 let mut buffers_to_delete = Vec::new();
3400 let mut renamed_buffers = Vec::new();
3401 for (buffer_id, buffer) in &self.opened_buffers {
3402 if let Some(buffer) = buffer.upgrade(cx) {
3403 buffer.update(cx, |buffer, cx| {
3404 if let Some(old_file) = File::from_dyn(buffer.file()) {
3405 if old_file.worktree != worktree_handle {
3406 return;
3407 }
3408
3409 let new_file = if let Some(entry) = old_file
3410 .entry_id
3411 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3412 {
3413 File {
3414 is_local: true,
3415 entry_id: Some(entry.id),
3416 mtime: entry.mtime,
3417 path: entry.path.clone(),
3418 worktree: worktree_handle.clone(),
3419 }
3420 } else if let Some(entry) =
3421 snapshot.entry_for_path(old_file.path().as_ref())
3422 {
3423 File {
3424 is_local: true,
3425 entry_id: Some(entry.id),
3426 mtime: entry.mtime,
3427 path: entry.path.clone(),
3428 worktree: worktree_handle.clone(),
3429 }
3430 } else {
3431 File {
3432 is_local: true,
3433 entry_id: None,
3434 path: old_file.path().clone(),
3435 mtime: old_file.mtime(),
3436 worktree: worktree_handle.clone(),
3437 }
3438 };
3439
3440 let old_path = old_file.abs_path(cx);
3441 if new_file.abs_path(cx) != old_path {
3442 renamed_buffers.push((cx.handle(), old_path));
3443 }
3444
3445 if let Some(project_id) = self.remote_id() {
3446 self.client
3447 .send(proto::UpdateBufferFile {
3448 project_id,
3449 buffer_id: *buffer_id as u64,
3450 file: Some(new_file.to_proto()),
3451 })
3452 .log_err();
3453 }
3454 buffer.file_updated(Box::new(new_file), cx).detach();
3455 }
3456 });
3457 } else {
3458 buffers_to_delete.push(*buffer_id);
3459 }
3460 }
3461
3462 for buffer_id in buffers_to_delete {
3463 self.opened_buffers.remove(&buffer_id);
3464 }
3465
3466 for (buffer, old_path) in renamed_buffers {
3467 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3468 self.assign_language_to_buffer(&buffer, cx);
3469 self.register_buffer_with_language_server(&buffer, cx);
3470 }
3471 }
3472
3473 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3474 let new_active_entry = entry.and_then(|project_path| {
3475 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3476 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3477 Some(entry.id)
3478 });
3479 if new_active_entry != self.active_entry {
3480 self.active_entry = new_active_entry;
3481 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3482 }
3483 }
3484
3485 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3486 self.language_server_statuses
3487 .values()
3488 .any(|status| status.pending_diagnostic_updates > 0)
3489 }
3490
3491 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3492 let mut summary = DiagnosticSummary::default();
3493 for (_, path_summary) in self.diagnostic_summaries(cx) {
3494 summary.error_count += path_summary.error_count;
3495 summary.warning_count += path_summary.warning_count;
3496 }
3497 summary
3498 }
3499
3500 pub fn diagnostic_summaries<'a>(
3501 &'a self,
3502 cx: &'a AppContext,
3503 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3504 self.worktrees(cx).flat_map(move |worktree| {
3505 let worktree = worktree.read(cx);
3506 let worktree_id = worktree.id();
3507 worktree
3508 .diagnostic_summaries()
3509 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3510 })
3511 }
3512
3513 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3514 if self
3515 .language_server_statuses
3516 .values()
3517 .map(|status| status.pending_diagnostic_updates)
3518 .sum::<isize>()
3519 == 1
3520 {
3521 cx.emit(Event::DiskBasedDiagnosticsStarted);
3522 }
3523 }
3524
3525 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3526 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3527 if self
3528 .language_server_statuses
3529 .values()
3530 .map(|status| status.pending_diagnostic_updates)
3531 .sum::<isize>()
3532 == 0
3533 {
3534 cx.emit(Event::DiskBasedDiagnosticsFinished);
3535 }
3536 }
3537
3538 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3539 self.active_entry
3540 }
3541
3542 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3543 self.worktree_for_id(path.worktree_id, cx)?
3544 .read(cx)
3545 .entry_for_path(&path.path)
3546 .map(|entry| entry.id)
3547 }
3548
3549 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3550 let worktree = self.worktree_for_entry(entry_id, cx)?;
3551 let worktree = worktree.read(cx);
3552 let worktree_id = worktree.id();
3553 let path = worktree.entry_for_id(entry_id)?.path.clone();
3554 Some(ProjectPath { worktree_id, path })
3555 }
3556
3557 // RPC message handlers
3558
3559 async fn handle_unshare_project(
3560 this: ModelHandle<Self>,
3561 _: TypedEnvelope<proto::UnshareProject>,
3562 _: Arc<Client>,
3563 mut cx: AsyncAppContext,
3564 ) -> Result<()> {
3565 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3566 Ok(())
3567 }
3568
3569 async fn handle_add_collaborator(
3570 this: ModelHandle<Self>,
3571 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3572 _: Arc<Client>,
3573 mut cx: AsyncAppContext,
3574 ) -> Result<()> {
3575 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3576 let collaborator = envelope
3577 .payload
3578 .collaborator
3579 .take()
3580 .ok_or_else(|| anyhow!("empty collaborator"))?;
3581
3582 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3583 this.update(&mut cx, |this, cx| {
3584 this.collaborators
3585 .insert(collaborator.peer_id, collaborator);
3586 cx.notify();
3587 });
3588
3589 Ok(())
3590 }
3591
3592 async fn handle_remove_collaborator(
3593 this: ModelHandle<Self>,
3594 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3595 _: Arc<Client>,
3596 mut cx: AsyncAppContext,
3597 ) -> Result<()> {
3598 this.update(&mut cx, |this, cx| {
3599 let peer_id = PeerId(envelope.payload.peer_id);
3600 let replica_id = this
3601 .collaborators
3602 .remove(&peer_id)
3603 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3604 .replica_id;
3605 for (_, buffer) in &this.opened_buffers {
3606 if let Some(buffer) = buffer.upgrade(cx) {
3607 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3608 }
3609 }
3610 cx.emit(Event::CollaboratorLeft(peer_id));
3611 cx.notify();
3612 Ok(())
3613 })
3614 }
3615
3616 async fn handle_register_worktree(
3617 this: ModelHandle<Self>,
3618 envelope: TypedEnvelope<proto::RegisterWorktree>,
3619 client: Arc<Client>,
3620 mut cx: AsyncAppContext,
3621 ) -> Result<()> {
3622 this.update(&mut cx, |this, cx| {
3623 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3624 let replica_id = this.replica_id();
3625 let worktree = proto::Worktree {
3626 id: envelope.payload.worktree_id,
3627 root_name: envelope.payload.root_name,
3628 entries: Default::default(),
3629 diagnostic_summaries: Default::default(),
3630 visible: envelope.payload.visible,
3631 };
3632 let (worktree, load_task) =
3633 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3634 this.add_worktree(&worktree, cx);
3635 load_task.detach();
3636 Ok(())
3637 })
3638 }
3639
3640 async fn handle_unregister_worktree(
3641 this: ModelHandle<Self>,
3642 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3643 _: Arc<Client>,
3644 mut cx: AsyncAppContext,
3645 ) -> Result<()> {
3646 this.update(&mut cx, |this, cx| {
3647 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3648 this.remove_worktree(worktree_id, cx);
3649 Ok(())
3650 })
3651 }
3652
3653 async fn handle_update_worktree(
3654 this: ModelHandle<Self>,
3655 envelope: TypedEnvelope<proto::UpdateWorktree>,
3656 _: Arc<Client>,
3657 mut cx: AsyncAppContext,
3658 ) -> Result<()> {
3659 this.update(&mut cx, |this, cx| {
3660 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3661 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3662 worktree.update(cx, |worktree, _| {
3663 let worktree = worktree.as_remote_mut().unwrap();
3664 worktree.update_from_remote(envelope)
3665 })?;
3666 }
3667 Ok(())
3668 })
3669 }
3670
3671 async fn handle_update_diagnostic_summary(
3672 this: ModelHandle<Self>,
3673 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3674 _: Arc<Client>,
3675 mut cx: AsyncAppContext,
3676 ) -> Result<()> {
3677 this.update(&mut cx, |this, cx| {
3678 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3679 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3680 if let Some(summary) = envelope.payload.summary {
3681 let project_path = ProjectPath {
3682 worktree_id,
3683 path: Path::new(&summary.path).into(),
3684 };
3685 worktree.update(cx, |worktree, _| {
3686 worktree
3687 .as_remote_mut()
3688 .unwrap()
3689 .update_diagnostic_summary(project_path.path.clone(), &summary);
3690 });
3691 cx.emit(Event::DiagnosticsUpdated(project_path));
3692 }
3693 }
3694 Ok(())
3695 })
3696 }
3697
3698 async fn handle_start_language_server(
3699 this: ModelHandle<Self>,
3700 envelope: TypedEnvelope<proto::StartLanguageServer>,
3701 _: Arc<Client>,
3702 mut cx: AsyncAppContext,
3703 ) -> Result<()> {
3704 let server = envelope
3705 .payload
3706 .server
3707 .ok_or_else(|| anyhow!("invalid server"))?;
3708 this.update(&mut cx, |this, cx| {
3709 this.language_server_statuses.insert(
3710 server.id as usize,
3711 LanguageServerStatus {
3712 name: server.name,
3713 pending_work: Default::default(),
3714 pending_diagnostic_updates: 0,
3715 },
3716 );
3717 cx.notify();
3718 });
3719 Ok(())
3720 }
3721
3722 async fn handle_update_language_server(
3723 this: ModelHandle<Self>,
3724 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3725 _: Arc<Client>,
3726 mut cx: AsyncAppContext,
3727 ) -> Result<()> {
3728 let language_server_id = envelope.payload.language_server_id as usize;
3729 match envelope
3730 .payload
3731 .variant
3732 .ok_or_else(|| anyhow!("invalid variant"))?
3733 {
3734 proto::update_language_server::Variant::WorkStart(payload) => {
3735 this.update(&mut cx, |this, cx| {
3736 this.on_lsp_work_start(language_server_id, payload.token, cx);
3737 })
3738 }
3739 proto::update_language_server::Variant::WorkProgress(payload) => {
3740 this.update(&mut cx, |this, cx| {
3741 this.on_lsp_work_progress(
3742 language_server_id,
3743 payload.token,
3744 LanguageServerProgress {
3745 message: payload.message,
3746 percentage: payload.percentage.map(|p| p as usize),
3747 last_update_at: Instant::now(),
3748 },
3749 cx,
3750 );
3751 })
3752 }
3753 proto::update_language_server::Variant::WorkEnd(payload) => {
3754 this.update(&mut cx, |this, cx| {
3755 this.on_lsp_work_end(language_server_id, payload.token, cx);
3756 })
3757 }
3758 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3759 this.update(&mut cx, |this, cx| {
3760 this.disk_based_diagnostics_started(cx);
3761 })
3762 }
3763 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3764 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3765 }
3766 }
3767
3768 Ok(())
3769 }
3770
3771 async fn handle_update_buffer(
3772 this: ModelHandle<Self>,
3773 envelope: TypedEnvelope<proto::UpdateBuffer>,
3774 _: Arc<Client>,
3775 mut cx: AsyncAppContext,
3776 ) -> Result<()> {
3777 this.update(&mut cx, |this, cx| {
3778 let payload = envelope.payload.clone();
3779 let buffer_id = payload.buffer_id;
3780 let ops = payload
3781 .operations
3782 .into_iter()
3783                 .map(language::proto::deserialize_operation)
3784 .collect::<Result<Vec<_>, _>>()?;
3785 match this.opened_buffers.entry(buffer_id) {
3786 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3787 OpenBuffer::Strong(buffer) => {
3788 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3789 }
3790 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3791 OpenBuffer::Weak(_) => {}
3792 },
3793 hash_map::Entry::Vacant(e) => {
3794 e.insert(OpenBuffer::Loading(ops));
3795 }
3796 }
3797 Ok(())
3798 })
3799 }
3800
3801 async fn handle_update_buffer_file(
3802 this: ModelHandle<Self>,
3803 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3804 _: Arc<Client>,
3805 mut cx: AsyncAppContext,
3806 ) -> Result<()> {
3807 this.update(&mut cx, |this, cx| {
3808 let payload = envelope.payload.clone();
3809 let buffer_id = payload.buffer_id;
3810 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3811 let worktree = this
3812 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3813 .ok_or_else(|| anyhow!("no such worktree"))?;
3814 let file = File::from_proto(file, worktree.clone(), cx)?;
3815 let buffer = this
3816 .opened_buffers
3817 .get_mut(&buffer_id)
3818 .and_then(|b| b.upgrade(cx))
3819 .ok_or_else(|| anyhow!("no such buffer"))?;
3820 buffer.update(cx, |buffer, cx| {
3821 buffer.file_updated(Box::new(file), cx).detach();
3822 });
3823 Ok(())
3824 })
3825 }
3826
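    /// Saves a buffer on behalf of a remote peer, first waiting for the buffer to
    /// catch up to the requested version, then responding with the saved version
    /// and mtime.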
3827 async fn handle_save_buffer(
3828 this: ModelHandle<Self>,
3829 envelope: TypedEnvelope<proto::SaveBuffer>,
3830 _: Arc<Client>,
3831 mut cx: AsyncAppContext,
3832 ) -> Result<proto::BufferSaved> {
3833 let buffer_id = envelope.payload.buffer_id;
3834 let requested_version = deserialize_version(envelope.payload.version);
3835
3836 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3837 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3838 let buffer = this
3839 .opened_buffers
3840 .get(&buffer_id)
3841 .and_then(|buffer| buffer.upgrade(cx))
3842 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3843 Ok::<_, anyhow::Error>((project_id, buffer))
3844 })?;
3845 buffer
3846 .update(&mut cx, |buffer, _| {
3847 buffer.wait_for_version(requested_version)
3848 })
3849 .await;
3850
3851 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3852 Ok(proto::BufferSaved {
3853 project_id,
3854 buffer_id,
3855 version: serialize_version(&saved_version),
3856 mtime: Some(mtime.into()),
3857 })
3858 }
3859
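    /// Reloads the requested buffers from disk and responds with the resulting
    /// project transaction, serialized for the requesting peer.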
3860 async fn handle_reload_buffers(
3861 this: ModelHandle<Self>,
3862 envelope: TypedEnvelope<proto::ReloadBuffers>,
3863 _: Arc<Client>,
3864 mut cx: AsyncAppContext,
3865 ) -> Result<proto::ReloadBuffersResponse> {
3866 let sender_id = envelope.original_sender_id()?;
3867 let reload = this.update(&mut cx, |this, cx| {
3868 let mut buffers = HashSet::default();
3869 for buffer_id in &envelope.payload.buffer_ids {
3870 buffers.insert(
3871 this.opened_buffers
3872 .get(buffer_id)
3873 .and_then(|buffer| buffer.upgrade(cx))
3874 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3875 );
3876 }
3877 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3878 })?;
3879
3880 let project_transaction = reload.await?;
3881 let project_transaction = this.update(&mut cx, |this, cx| {
3882 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3883 });
3884 Ok(proto::ReloadBuffersResponse {
3885 transaction: Some(project_transaction),
3886 })
3887 }
3888
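    /// Formats the requested buffers and responds with the resulting project
    /// transaction, serialized for the requesting peer.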
3889 async fn handle_format_buffers(
3890 this: ModelHandle<Self>,
3891 envelope: TypedEnvelope<proto::FormatBuffers>,
3892 _: Arc<Client>,
3893 mut cx: AsyncAppContext,
3894 ) -> Result<proto::FormatBuffersResponse> {
3895 let sender_id = envelope.original_sender_id()?;
3896 let format = this.update(&mut cx, |this, cx| {
3897 let mut buffers = HashSet::default();
3898 for buffer_id in &envelope.payload.buffer_ids {
3899 buffers.insert(
3900 this.opened_buffers
3901 .get(buffer_id)
3902 .and_then(|buffer| buffer.upgrade(cx))
3903 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3904 );
3905 }
3906 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3907 })?;
3908
3909 let project_transaction = format.await?;
3910 let project_transaction = this.update(&mut cx, |this, cx| {
3911 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3912 });
3913 Ok(proto::FormatBuffersResponse {
3914 transaction: Some(project_transaction),
3915 })
3916 }
3917
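    /// Computes completions at the given position on behalf of a remote peer,
    /// waiting for the buffer to reach the requested version first.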
3918 async fn handle_get_completions(
3919 this: ModelHandle<Self>,
3920 envelope: TypedEnvelope<proto::GetCompletions>,
3921 _: Arc<Client>,
3922 mut cx: AsyncAppContext,
3923 ) -> Result<proto::GetCompletionsResponse> {
3924 let position = envelope
3925 .payload
3926 .position
3927 .and_then(language::proto::deserialize_anchor)
3928 .ok_or_else(|| anyhow!("invalid position"))?;
3929 let version = deserialize_version(envelope.payload.version);
3930 let buffer = this.read_with(&cx, |this, cx| {
3931 this.opened_buffers
3932 .get(&envelope.payload.buffer_id)
3933 .and_then(|buffer| buffer.upgrade(cx))
3934 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3935 })?;
3936 buffer
3937 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3938 .await;
3939 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3940 let completions = this
3941 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3942 .await?;
3943
3944 Ok(proto::GetCompletionsResponse {
3945 completions: completions
3946 .iter()
3947 .map(language::proto::serialize_completion)
3948 .collect(),
3949 version: serialize_version(&version),
3950 })
3951 }
3952
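    /// Applies a completion's additional text edits on behalf of a remote peer
    /// and responds with the resulting transaction, if any.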
3953 async fn handle_apply_additional_edits_for_completion(
3954 this: ModelHandle<Self>,
3955 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3956 _: Arc<Client>,
3957 mut cx: AsyncAppContext,
3958 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3959 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3960 let buffer = this
3961 .opened_buffers
3962 .get(&envelope.payload.buffer_id)
3963 .and_then(|buffer| buffer.upgrade(cx))
3964 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3965 let language = buffer.read(cx).language();
3966 let completion = language::proto::deserialize_completion(
3967 envelope
3968 .payload
3969 .completion
3970 .ok_or_else(|| anyhow!("invalid completion"))?,
3971 language,
3972 )?;
3973 Ok::<_, anyhow::Error>(
3974 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3975 )
3976 })?;
3977
3978 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3979 transaction: apply_additional_edits
3980 .await?
3981 .as_ref()
3982 .map(language::proto::serialize_transaction),
3983 })
3984 }
3985
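    /// Computes code actions for the given anchor range on behalf of a remote
    /// peer, waiting for the buffer to reach the requested version first.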
3986 async fn handle_get_code_actions(
3987 this: ModelHandle<Self>,
3988 envelope: TypedEnvelope<proto::GetCodeActions>,
3989 _: Arc<Client>,
3990 mut cx: AsyncAppContext,
3991 ) -> Result<proto::GetCodeActionsResponse> {
3992 let start = envelope
3993 .payload
3994 .start
3995 .and_then(language::proto::deserialize_anchor)
3996 .ok_or_else(|| anyhow!("invalid start"))?;
3997 let end = envelope
3998 .payload
3999 .end
4000 .and_then(language::proto::deserialize_anchor)
4001 .ok_or_else(|| anyhow!("invalid end"))?;
4002 let buffer = this.update(&mut cx, |this, cx| {
4003 this.opened_buffers
4004 .get(&envelope.payload.buffer_id)
4005 .and_then(|buffer| buffer.upgrade(cx))
4006 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4007 })?;
4008 buffer
4009 .update(&mut cx, |buffer, _| {
4010 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4011 })
4012 .await;
4013
4014 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4015 let code_actions = this.update(&mut cx, |this, cx| {
4016 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4017 })?;
4018
4019 Ok(proto::GetCodeActionsResponse {
4020 actions: code_actions
4021 .await?
4022 .iter()
4023 .map(language::proto::serialize_code_action)
4024 .collect(),
4025 version: serialize_version(&version),
4026 })
4027 }
4028
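    /// Applies a code action on behalf of a remote peer and responds with the
    /// resulting project transaction, serialized for that peer.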
4029 async fn handle_apply_code_action(
4030 this: ModelHandle<Self>,
4031 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4032 _: Arc<Client>,
4033 mut cx: AsyncAppContext,
4034 ) -> Result<proto::ApplyCodeActionResponse> {
4035 let sender_id = envelope.original_sender_id()?;
4036 let action = language::proto::deserialize_code_action(
4037 envelope
4038 .payload
4039 .action
4040 .ok_or_else(|| anyhow!("invalid action"))?,
4041 )?;
4042 let apply_code_action = this.update(&mut cx, |this, cx| {
4043 let buffer = this
4044 .opened_buffers
4045 .get(&envelope.payload.buffer_id)
4046 .and_then(|buffer| buffer.upgrade(cx))
4047 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4048 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4049 })?;
4050
4051 let project_transaction = apply_code_action.await?;
4052 let project_transaction = this.update(&mut cx, |this, cx| {
4053 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4054 });
4055 Ok(proto::ApplyCodeActionResponse {
4056 transaction: Some(project_transaction),
4057 })
4058 }
4059
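    /// Generic handler for LSP-backed requests: deserializes the proto request,
    /// forwards it to the appropriate language server via `request_lsp`, and
    /// serializes the response for the requesting peer.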
4060 async fn handle_lsp_command<T: LspCommand>(
4061 this: ModelHandle<Self>,
4062 envelope: TypedEnvelope<T::ProtoRequest>,
4063 _: Arc<Client>,
4064 mut cx: AsyncAppContext,
4065 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4066 where
4067 <T::LspRequest as lsp::request::Request>::Result: Send,
4068 {
4069 let sender_id = envelope.original_sender_id()?;
4070 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4071 let buffer_handle = this.read_with(&cx, |this, _| {
4072 this.opened_buffers
4073 .get(&buffer_id)
4074 .and_then(|buffer| buffer.upgrade(&cx))
4075 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4076 })?;
4077 let request = T::from_proto(
4078 envelope.payload,
4079 this.clone(),
4080 buffer_handle.clone(),
4081 cx.clone(),
4082 )
4083 .await?;
4084 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4085 let response = this
4086 .update(&mut cx, |this, cx| {
4087 this.request_lsp(buffer_handle, request, cx)
4088 })
4089 .await?;
4090 this.update(&mut cx, |this, cx| {
4091 Ok(T::response_to_proto(
4092 response,
4093 this,
4094 sender_id,
4095 &buffer_version,
4096 cx,
4097 ))
4098 })
4099 }
4100
4101 async fn handle_get_project_symbols(
4102 this: ModelHandle<Self>,
4103 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4104 _: Arc<Client>,
4105 mut cx: AsyncAppContext,
4106 ) -> Result<proto::GetProjectSymbolsResponse> {
4107 let symbols = this
4108 .update(&mut cx, |this, cx| {
4109 this.symbols(&envelope.payload.query, cx)
4110 })
4111 .await?;
4112
4113 Ok(proto::GetProjectSymbolsResponse {
4114 symbols: symbols.iter().map(serialize_symbol).collect(),
4115 })
4116 }
4117
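    /// Runs a project-wide search on behalf of a remote peer and responds with
    /// the matching locations, serializing the containing buffers for that peer.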
4118 async fn handle_search_project(
4119 this: ModelHandle<Self>,
4120 envelope: TypedEnvelope<proto::SearchProject>,
4121 _: Arc<Client>,
4122 mut cx: AsyncAppContext,
4123 ) -> Result<proto::SearchProjectResponse> {
4124 let peer_id = envelope.original_sender_id()?;
4125 let query = SearchQuery::from_proto(envelope.payload)?;
4126 let result = this
4127 .update(&mut cx, |this, cx| this.search(query, cx))
4128 .await?;
4129
4130 this.update(&mut cx, |this, cx| {
4131 let mut locations = Vec::new();
4132 for (buffer, ranges) in result {
4133 for range in ranges {
4134 let start = serialize_anchor(&range.start);
4135 let end = serialize_anchor(&range.end);
4136 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4137 locations.push(proto::Location {
4138 buffer: Some(buffer),
4139 start: Some(start),
4140 end: Some(end),
4141 });
4142 }
4143 }
4144 Ok(proto::SearchProjectResponse { locations })
4145 })
4146 }
4147
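    /// Opens the buffer containing the given symbol, verifying the symbol's
    /// signature first so that peers can only request symbols that this project
    /// previously produced.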
4148 async fn handle_open_buffer_for_symbol(
4149 this: ModelHandle<Self>,
4150 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4151 _: Arc<Client>,
4152 mut cx: AsyncAppContext,
4153 ) -> Result<proto::OpenBufferForSymbolResponse> {
4154 let peer_id = envelope.original_sender_id()?;
4155 let symbol = envelope
4156 .payload
4157 .symbol
4158 .ok_or_else(|| anyhow!("invalid symbol"))?;
4159 let symbol = this.read_with(&cx, |this, _| {
4160 let symbol = this.deserialize_symbol(symbol)?;
4161 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4162 if signature == symbol.signature {
4163 Ok(symbol)
4164 } else {
4165 Err(anyhow!("invalid symbol signature"))
4166 }
4167 })?;
4168 let buffer = this
4169 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4170 .await?;
4171
4172 Ok(proto::OpenBufferForSymbolResponse {
4173 buffer: Some(this.update(&mut cx, |this, cx| {
4174 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4175 })),
4176 })
4177 }
4178
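    /// Computes a SHA-256 digest over the worktree id, path, and this project's
    /// private nonce. The digest is carried in `Symbol::signature` and checked in
    /// `handle_open_buffer_for_symbol`, preventing peers from forging symbols
    /// that point at arbitrary paths.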
4179 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4180 let mut hasher = Sha256::new();
4181 hasher.update(worktree_id.to_proto().to_be_bytes());
4182 hasher.update(path.to_string_lossy().as_bytes());
4183 hasher.update(self.nonce.to_be_bytes());
4184 hasher.finalize().as_slice().try_into().unwrap()
4185 }
4186
4187 async fn handle_open_buffer_by_id(
4188 this: ModelHandle<Self>,
4189 envelope: TypedEnvelope<proto::OpenBufferById>,
4190 _: Arc<Client>,
4191 mut cx: AsyncAppContext,
4192 ) -> Result<proto::OpenBufferResponse> {
4193 let peer_id = envelope.original_sender_id()?;
4194 let buffer = this
4195 .update(&mut cx, |this, cx| {
4196 this.open_buffer_by_id(envelope.payload.id, cx)
4197 })
4198 .await?;
4199 this.update(&mut cx, |this, cx| {
4200 Ok(proto::OpenBufferResponse {
4201 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4202 })
4203 })
4204 }
4205
4206 async fn handle_open_buffer_by_path(
4207 this: ModelHandle<Self>,
4208 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4209 _: Arc<Client>,
4210 mut cx: AsyncAppContext,
4211 ) -> Result<proto::OpenBufferResponse> {
4212 let peer_id = envelope.original_sender_id()?;
4213 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4214 let open_buffer = this.update(&mut cx, |this, cx| {
4215 this.open_buffer(
4216 ProjectPath {
4217 worktree_id,
4218 path: PathBuf::from(envelope.payload.path).into(),
4219 },
4220 cx,
4221 )
4222 });
4223
4224 let buffer = open_buffer.await?;
4225 this.update(&mut cx, |this, cx| {
4226 Ok(proto::OpenBufferResponse {
4227 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4228 })
4229 })
4230 }
4231
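    /// Serializes a project transaction for the given peer, including each
    /// affected buffer either as full state or as just an id, depending on
    /// whether the peer has seen that buffer before.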
4232 fn serialize_project_transaction_for_peer(
4233 &mut self,
4234 project_transaction: ProjectTransaction,
4235 peer_id: PeerId,
4236 cx: &AppContext,
4237 ) -> proto::ProjectTransaction {
4238 let mut serialized_transaction = proto::ProjectTransaction {
4239 buffers: Default::default(),
4240 transactions: Default::default(),
4241 };
4242 for (buffer, transaction) in project_transaction.0 {
4243 serialized_transaction
4244 .buffers
4245 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4246 serialized_transaction
4247 .transactions
4248 .push(language::proto::serialize_transaction(&transaction));
4249 }
4250 serialized_transaction
4251 }
4252
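    /// Deserializes a project transaction received from a peer, waiting for the
    /// referenced edits to arrive in each buffer and optionally pushing the
    /// transactions onto the buffers' undo histories.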
4253 fn deserialize_project_transaction(
4254 &mut self,
4255 message: proto::ProjectTransaction,
4256 push_to_history: bool,
4257 cx: &mut ModelContext<Self>,
4258 ) -> Task<Result<ProjectTransaction>> {
4259 cx.spawn(|this, mut cx| async move {
4260 let mut project_transaction = ProjectTransaction::default();
4261 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4262 let buffer = this
4263 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4264 .await?;
4265 let transaction = language::proto::deserialize_transaction(transaction)?;
4266 project_transaction.0.insert(buffer, transaction);
4267 }
4268
4269 for (buffer, transaction) in &project_transaction.0 {
4270 buffer
4271 .update(&mut cx, |buffer, _| {
4272 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4273 })
4274 .await;
4275
4276 if push_to_history {
4277 buffer.update(&mut cx, |buffer, _| {
4278 buffer.push_transaction(transaction.clone(), Instant::now());
4279 });
4280 }
4281 }
4282
4283 Ok(project_transaction)
4284 })
4285 }
4286
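    /// Serializes a buffer for the given peer, sending the full buffer state the
    /// first time the buffer is shared with that peer and only its id thereafter.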
4287 fn serialize_buffer_for_peer(
4288 &mut self,
4289 buffer: &ModelHandle<Buffer>,
4290 peer_id: PeerId,
4291 cx: &AppContext,
4292 ) -> proto::Buffer {
4293 let buffer_id = buffer.read(cx).remote_id();
4294 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4295 if shared_buffers.insert(buffer_id) {
4296 proto::Buffer {
4297 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4298 }
4299 } else {
4300 proto::Buffer {
4301 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4302 }
4303 }
4304 }
4305
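    /// Resolves a serialized buffer: for an id-only variant, waits until a buffer
    /// with that id has been opened locally; for a full-state variant, constructs
    /// the buffer, restores its file and worktree, and registers it with the project.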
4306 fn deserialize_buffer(
4307 &mut self,
4308 buffer: proto::Buffer,
4309 cx: &mut ModelContext<Self>,
4310 ) -> Task<Result<ModelHandle<Buffer>>> {
4311 let replica_id = self.replica_id();
4312
4313 let opened_buffer_tx = self.opened_buffer.0.clone();
4314 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4315 cx.spawn(|this, mut cx| async move {
4316 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4317 proto::buffer::Variant::Id(id) => {
4318 let buffer = loop {
4319 let buffer = this.read_with(&cx, |this, cx| {
4320 this.opened_buffers
4321 .get(&id)
4322 .and_then(|buffer| buffer.upgrade(cx))
4323 });
4324 if let Some(buffer) = buffer {
4325 break buffer;
4326 }
4327 opened_buffer_rx
4328 .next()
4329 .await
4330 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4331 };
4332 Ok(buffer)
4333 }
4334 proto::buffer::Variant::State(mut buffer) => {
4335 let mut buffer_worktree = None;
4336 let mut buffer_file = None;
4337 if let Some(file) = buffer.file.take() {
4338 this.read_with(&cx, |this, cx| {
4339 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4340 let worktree =
4341 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4342 anyhow!("no worktree found for id {}", file.worktree_id)
4343 })?;
4344 buffer_file =
4345 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4346 as Box<dyn language::File>);
4347 buffer_worktree = Some(worktree);
4348 Ok::<_, anyhow::Error>(())
4349 })?;
4350 }
4351
4352 let buffer = cx.add_model(|cx| {
4353 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4354 });
4355
4356 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4357
4358 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4359 Ok(buffer)
4360 }
4361 }
4362 })
4363 }
4364
4365 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4366 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4367 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4368 let start = serialized_symbol
4369 .start
4370 .ok_or_else(|| anyhow!("invalid start"))?;
4371 let end = serialized_symbol
4372 .end
4373 .ok_or_else(|| anyhow!("invalid end"))?;
4374 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4375 let path = PathBuf::from(serialized_symbol.path);
4376 let language = self.languages.select_language(&path);
4377 Ok(Symbol {
4378 source_worktree_id,
4379 worktree_id,
4380 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4381 label: language
4382 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4383 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4384 name: serialized_symbol.name,
4385 path,
4386 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4387 kind,
4388 signature: serialized_symbol
4389 .signature
4390 .try_into()
4391 .map_err(|_| anyhow!("invalid signature"))?,
4392 })
4393 }
4394
4395 async fn handle_buffer_saved(
4396 this: ModelHandle<Self>,
4397 envelope: TypedEnvelope<proto::BufferSaved>,
4398 _: Arc<Client>,
4399 mut cx: AsyncAppContext,
4400 ) -> Result<()> {
4401 let version = deserialize_version(envelope.payload.version);
4402 let mtime = envelope
4403 .payload
4404 .mtime
4405 .ok_or_else(|| anyhow!("missing mtime"))?
4406 .into();
4407
4408 this.update(&mut cx, |this, cx| {
4409 let buffer = this
4410 .opened_buffers
4411 .get(&envelope.payload.buffer_id)
4412 .and_then(|buffer| buffer.upgrade(cx));
4413 if let Some(buffer) = buffer {
4414 buffer.update(cx, |buffer, cx| {
4415 buffer.did_save(version, mtime, None, cx);
4416 });
4417 }
4418 Ok(())
4419 })
4420 }
4421
4422 async fn handle_buffer_reloaded(
4423 this: ModelHandle<Self>,
4424 envelope: TypedEnvelope<proto::BufferReloaded>,
4425 _: Arc<Client>,
4426 mut cx: AsyncAppContext,
4427 ) -> Result<()> {
4428 let payload = envelope.payload.clone();
4429 let version = deserialize_version(payload.version);
4430 let mtime = payload
4431 .mtime
4432 .ok_or_else(|| anyhow!("missing mtime"))?
4433 .into();
4434 this.update(&mut cx, |this, cx| {
4435 let buffer = this
4436 .opened_buffers
4437 .get(&payload.buffer_id)
4438 .and_then(|buffer| buffer.upgrade(cx));
4439 if let Some(buffer) = buffer {
4440 buffer.update(cx, |buffer, cx| {
4441 buffer.did_reload(version, mtime, cx);
4442 });
4443 }
4444 Ok(())
4445 })
4446 }
4447
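    /// Fuzzy-matches `query` against the paths of all visible worktrees on a
    /// background thread, returning up to `max_results` matches.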
4448 pub fn match_paths<'a>(
4449 &self,
4450 query: &'a str,
4451 include_ignored: bool,
4452 smart_case: bool,
4453 max_results: usize,
4454 cancel_flag: &'a AtomicBool,
4455 cx: &AppContext,
4456 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4457 let worktrees = self
4458 .worktrees(cx)
4459 .filter(|worktree| worktree.read(cx).is_visible())
4460 .collect::<Vec<_>>();
4461 let include_root_name = worktrees.len() > 1;
4462 let candidate_sets = worktrees
4463 .into_iter()
4464 .map(|worktree| CandidateSet {
4465 snapshot: worktree.read(cx).snapshot(),
4466 include_ignored,
4467 include_root_name,
4468 })
4469 .collect::<Vec<_>>();
4470
4471 let background = cx.background().clone();
4472 async move {
4473 fuzzy::match_paths(
4474 candidate_sets.as_slice(),
4475 query,
4476 smart_case,
4477 max_results,
4478 cancel_flag,
4479 background,
4480 )
4481 .await
4482 }
4483 }
4484
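    /// Converts LSP text edits into anchored buffer edits against the snapshot
    /// corresponding to `version`, coalescing adjacent edits and diffing
    /// multi-line replacements so that anchors in unchanged regions are preserved.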
4485 fn edits_from_lsp(
4486 &mut self,
4487 buffer: &ModelHandle<Buffer>,
4488 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4489 version: Option<i32>,
4490 cx: &mut ModelContext<Self>,
4491 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4492 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4493 cx.background().spawn(async move {
4494 let snapshot = snapshot?;
4495 let mut lsp_edits = lsp_edits
4496 .into_iter()
4497 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4498 .peekable();
4499
4500 let mut edits = Vec::new();
4501 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4502 // Combine any LSP edits that are adjacent.
4503 //
4504 // Also, combine LSP edits that are separated from each other by only
4505 // a newline. This is important because for some code actions,
4506 // Rust-analyzer rewrites the entire buffer via a series of edits that
4507 // are separated by unchanged newline characters.
4508 //
4509 // In order for the diffing logic below to work properly, any edits that
4510 // cancel each other out must be combined into one.
4511 while let Some((next_range, next_text)) = lsp_edits.peek() {
4512 if next_range.start > range.end {
4513 if next_range.start.row > range.end.row + 1
4514 || next_range.start.column > 0
4515 || snapshot.clip_point_utf16(
4516 PointUtf16::new(range.end.row, u32::MAX),
4517 Bias::Left,
4518 ) > range.end
4519 {
4520 break;
4521 }
4522 new_text.push('\n');
4523 }
4524 range.end = next_range.end;
4525 new_text.push_str(&next_text);
4526 lsp_edits.next();
4527 }
4528
4529 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4530 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4531 {
4532 return Err(anyhow!("invalid edits received from language server"));
4533 }
4534
4535 // For multiline edits, perform a diff of the old and new text so that
4536 // we can identify the changes more precisely, preserving the locations
4537 // of any anchors positioned in the unchanged regions.
4538 if range.end.row > range.start.row {
4539 let mut offset = range.start.to_offset(&snapshot);
4540 let old_text = snapshot.text_for_range(range).collect::<String>();
4541
4542 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
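                    // `moved_since_edit` is true when the previous diff change was
                    // unchanged text; while it is false, consecutive deletions and
                    // insertions are merged into the preceding edit.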
4543 let mut moved_since_edit = true;
4544 for change in diff.iter_all_changes() {
4545 let tag = change.tag();
4546 let value = change.value();
4547 match tag {
4548 ChangeTag::Equal => {
4549 offset += value.len();
4550 moved_since_edit = true;
4551 }
4552 ChangeTag::Delete => {
4553 let start = snapshot.anchor_after(offset);
4554 let end = snapshot.anchor_before(offset + value.len());
4555 if moved_since_edit {
4556 edits.push((start..end, String::new()));
4557 } else {
4558 edits.last_mut().unwrap().0.end = end;
4559 }
4560 offset += value.len();
4561 moved_since_edit = false;
4562 }
4563 ChangeTag::Insert => {
4564 if moved_since_edit {
4565 let anchor = snapshot.anchor_after(offset);
4566 edits.push((anchor.clone()..anchor, value.to_string()));
4567 } else {
4568 edits.last_mut().unwrap().1.push_str(value);
4569 }
4570 moved_since_edit = false;
4571 }
4572 }
4573 }
4574 } else if range.end == range.start {
4575 let anchor = snapshot.anchor_after(range.start);
4576 edits.push((anchor.clone()..anchor, new_text));
4577 } else {
4578 let edit_start = snapshot.anchor_after(range.start);
4579 let edit_end = snapshot.anchor_before(range.end);
4580 edits.push((edit_start..edit_end, new_text));
4581 }
4582 }
4583
4584 Ok(edits)
4585 })
4586 }
4587
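    /// Returns the buffer snapshot matching the document version reported by the
    /// language server, or the current text snapshot when no version is given.
    /// Snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old are pruned.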
4588 fn buffer_snapshot_for_lsp_version(
4589 &mut self,
4590 buffer: &ModelHandle<Buffer>,
4591 version: Option<i32>,
4592 cx: &AppContext,
4593 ) -> Result<TextBufferSnapshot> {
4594 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4595
4596 if let Some(version) = version {
4597 let buffer_id = buffer.read(cx).remote_id();
4598 let snapshots = self
4599 .buffer_snapshots
4600 .get_mut(&buffer_id)
4601 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4602 let mut found_snapshot = None;
4603 snapshots.retain(|(snapshot_version, snapshot)| {
4604 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4605 false
4606 } else {
4607 if *snapshot_version == version {
4608 found_snapshot = Some(snapshot.clone());
4609 }
4610 true
4611 }
4612 });
4613
4614 found_snapshot.ok_or_else(|| {
4615 anyhow!(
4616 "snapshot not found for buffer {} at version {}",
4617 buffer_id,
4618 version
4619 )
4620 })
4621 } else {
            Ok(buffer.read(cx).text_snapshot())
4623 }
4624 }
4625
4626 fn language_server_for_buffer(
4627 &self,
4628 buffer: &Buffer,
4629 cx: &AppContext,
4630 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4631 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4632 let worktree_id = file.worktree_id(cx);
4633 self.language_servers
4634 .get(&(worktree_id, language.lsp_adapter()?.name()))
4635 } else {
4636 None
4637 }
4638 }
4639}
4640
4641impl WorktreeHandle {
4642 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4643 match self {
4644 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4645 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4646 }
4647 }
4648}
4649
4650impl OpenBuffer {
4651 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4652 match self {
4653 OpenBuffer::Strong(handle) => Some(handle.clone()),
4654 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4655 OpenBuffer::Loading(_) => None,
4656 }
4657 }
4658}
4659
4660struct CandidateSet {
4661 snapshot: Snapshot,
4662 include_ignored: bool,
4663 include_root_name: bool,
4664}
4665
4666impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4667 type Candidates = CandidateSetIter<'a>;
4668
4669 fn id(&self) -> usize {
4670 self.snapshot.id().to_usize()
4671 }
4672
4673 fn len(&self) -> usize {
4674 if self.include_ignored {
4675 self.snapshot.file_count()
4676 } else {
4677 self.snapshot.visible_file_count()
4678 }
4679 }
4680
4681 fn prefix(&self) -> Arc<str> {
4682 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4683 self.snapshot.root_name().into()
4684 } else if self.include_root_name {
4685 format!("{}/", self.snapshot.root_name()).into()
4686 } else {
4687 "".into()
4688 }
4689 }
4690
4691 fn candidates(&'a self, start: usize) -> Self::Candidates {
4692 CandidateSetIter {
4693 traversal: self.snapshot.files(self.include_ignored, start),
4694 }
4695 }
4696}
4697
4698struct CandidateSetIter<'a> {
4699 traversal: Traversal<'a>,
4700}
4701
4702impl<'a> Iterator for CandidateSetIter<'a> {
4703 type Item = PathMatchCandidate<'a>;
4704
4705 fn next(&mut self) -> Option<Self::Item> {
4706 self.traversal.next().map(|entry| {
4707 if let EntryKind::File(char_bag) = entry.kind {
4708 PathMatchCandidate {
4709 path: &entry.path,
4710 char_bag,
4711 }
4712 } else {
4713 unreachable!()
4714 }
4715 })
4716 }
4717}
4718
4719impl Entity for Project {
4720 type Event = Event;
4721
4722 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4723 match &self.client_state {
4724 ProjectClientState::Local { remote_id_rx, .. } => {
4725 if let Some(project_id) = *remote_id_rx.borrow() {
4726 self.client
4727 .send(proto::UnregisterProject { project_id })
4728 .log_err();
4729 }
4730 }
4731 ProjectClientState::Remote { remote_id, .. } => {
4732 self.client
4733 .send(proto::LeaveProject {
4734 project_id: *remote_id,
4735 })
4736 .log_err();
4737 }
4738 }
4739 }
4740
4741 fn app_will_quit(
4742 &mut self,
4743 _: &mut MutableAppContext,
4744 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4745 let shutdown_futures = self
4746 .language_servers
4747 .drain()
4748 .filter_map(|(_, (_, server))| server.shutdown())
4749 .collect::<Vec<_>>();
4750 Some(
4751 async move {
4752 futures::future::join_all(shutdown_futures).await;
4753 }
4754 .boxed(),
4755 )
4756 }
4757}
4758
4759impl Collaborator {
4760 fn from_proto(
4761 message: proto::Collaborator,
4762 user_store: &ModelHandle<UserStore>,
4763 cx: &mut AsyncAppContext,
4764 ) -> impl Future<Output = Result<Self>> {
4765 let user = user_store.update(cx, |user_store, cx| {
4766 user_store.fetch_user(message.user_id, cx)
4767 });
4768
4769 async move {
4770 Ok(Self {
4771 peer_id: PeerId(message.peer_id),
4772 user: user.await?,
4773 replica_id: message.replica_id as ReplicaId,
4774 })
4775 }
4776 }
4777}
4778
4779impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4780 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4781 Self {
4782 worktree_id,
4783 path: path.as_ref().into(),
4784 }
4785 }
4786}
4787
4788impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4789 fn from(options: lsp::CreateFileOptions) -> Self {
4790 Self {
4791 overwrite: options.overwrite.unwrap_or(false),
4792 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4793 }
4794 }
4795}
4796
4797impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4798 fn from(options: lsp::RenameFileOptions) -> Self {
4799 Self {
4800 overwrite: options.overwrite.unwrap_or(false),
4801 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4802 }
4803 }
4804}
4805
4806impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4807 fn from(options: lsp::DeleteFileOptions) -> Self {
4808 Self {
4809 recursive: options.recursive.unwrap_or(false),
4810 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4811 }
4812 }
4813}
4814
4815fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4816 proto::Symbol {
4817 source_worktree_id: symbol.source_worktree_id.to_proto(),
4818 worktree_id: symbol.worktree_id.to_proto(),
4819 language_server_name: symbol.language_server_name.0.to_string(),
4820 name: symbol.name.clone(),
4821 kind: unsafe { mem::transmute(symbol.kind) },
4822 path: symbol.path.to_string_lossy().to_string(),
4823 start: Some(proto::Point {
4824 row: symbol.range.start.row,
4825 column: symbol.range.start.column,
4826 }),
4827 end: Some(proto::Point {
4828 row: symbol.range.end.row,
4829 column: symbol.range.end.column,
4830 }),
4831 signature: symbol.signature.to_vec(),
4832 }
4833}
4834
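/// Computes a path relative to `base` that resolves to `path`, inserting `..`
/// components where the two paths diverge.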
4835fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4836 let mut path_components = path.components();
4837 let mut base_components = base.components();
4838 let mut components: Vec<Component> = Vec::new();
4839 loop {
4840 match (path_components.next(), base_components.next()) {
4841 (None, None) => break,
4842 (Some(a), None) => {
4843 components.push(a);
4844 components.extend(path_components.by_ref());
4845 break;
4846 }
4847 (None, _) => components.push(Component::ParentDir),
4848 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4849 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4850 (Some(a), Some(_)) => {
4851 components.push(Component::ParentDir);
4852 for _ in base_components {
4853 components.push(Component::ParentDir);
4854 }
4855 components.push(a);
4856 components.extend(path_components.by_ref());
4857 break;
4858 }
4859 }
4860 }
4861 components.iter().map(|c| c.as_os_str()).collect()
4862}
4863
4864impl Item for Buffer {
4865 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4866 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4867 }
4868}
4869
4870#[cfg(test)]
4871mod tests {
4872 use super::{Event, *};
4873 use fs::RealFs;
4874 use futures::{future, StreamExt};
4875 use gpui::test::subscribe;
4876 use language::{
4877 tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, OffsetRangeExt, Point,
4878 ToPoint,
4879 };
4880 use lsp::Url;
4881 use serde_json::json;
4882 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4883 use unindent::Unindent as _;
4884 use util::{assert_set_eq, test::temp_tree};
4885 use worktree::WorktreeHandle as _;
4886
4887 #[gpui::test]
4888 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4889 let dir = temp_tree(json!({
4890 "root": {
4891 "apple": "",
4892 "banana": {
4893 "carrot": {
4894 "date": "",
4895 "endive": "",
4896 }
4897 },
4898 "fennel": {
4899 "grape": "",
4900 }
4901 }
4902 }));
4903
4904 let root_link_path = dir.path().join("root_link");
4905 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4906 unix::fs::symlink(
4907 &dir.path().join("root/fennel"),
4908 &dir.path().join("root/finnochio"),
4909 )
4910 .unwrap();
4911
4912 let project = Project::test(Arc::new(RealFs), cx);
4913
4914 let (tree, _) = project
4915 .update(cx, |project, cx| {
4916 project.find_or_create_local_worktree(&root_link_path, true, cx)
4917 })
4918 .await
4919 .unwrap();
4920
4921 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4922 .await;
4923 cx.read(|cx| {
4924 let tree = tree.read(cx);
4925 assert_eq!(tree.file_count(), 5);
4926 assert_eq!(
4927 tree.inode_for_path("fennel/grape"),
4928 tree.inode_for_path("finnochio/grape")
4929 );
4930 });
4931
4932 let cancel_flag = Default::default();
4933 let results = project
4934 .read_with(cx, |project, cx| {
4935 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4936 })
4937 .await;
4938 assert_eq!(
4939 results
4940 .into_iter()
4941 .map(|result| result.path)
4942 .collect::<Vec<Arc<Path>>>(),
4943 vec![
4944 PathBuf::from("banana/carrot/date").into(),
4945 PathBuf::from("banana/carrot/endive").into(),
4946 ]
4947 );
4948 }
4949
4950 #[gpui::test]
4951 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4952 cx.foreground().forbid_parking();
4953
4954 let mut rust_language = Language::new(
4955 LanguageConfig {
4956 name: "Rust".into(),
4957 path_suffixes: vec!["rs".to_string()],
4958 ..Default::default()
4959 },
4960 Some(tree_sitter_rust::language()),
4961 );
4962 let mut json_language = Language::new(
4963 LanguageConfig {
4964 name: "JSON".into(),
4965 path_suffixes: vec!["json".to_string()],
4966 ..Default::default()
4967 },
4968 None,
4969 );
4970 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
4971 name: "the-rust-language-server",
4972 capabilities: lsp::ServerCapabilities {
4973 completion_provider: Some(lsp::CompletionOptions {
4974 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4975 ..Default::default()
4976 }),
4977 ..Default::default()
4978 },
4979 ..Default::default()
4980 });
4981 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
4982 name: "the-json-language-server",
4983 capabilities: lsp::ServerCapabilities {
4984 completion_provider: Some(lsp::CompletionOptions {
4985 trigger_characters: Some(vec![":".to_string()]),
4986 ..Default::default()
4987 }),
4988 ..Default::default()
4989 },
4990 ..Default::default()
4991 });
4992
4993 let fs = FakeFs::new(cx.background());
4994 fs.insert_tree(
4995 "/the-root",
4996 json!({
4997 "test.rs": "const A: i32 = 1;",
4998 "test2.rs": "",
4999 "Cargo.toml": "a = 1",
5000 "package.json": "{\"a\": 1}",
5001 }),
5002 )
5003 .await;
5004
5005 let project = Project::test(fs.clone(), cx);
5006 project.update(cx, |project, _| {
5007 project.languages.add(Arc::new(rust_language));
5008 project.languages.add(Arc::new(json_language));
5009 });
5010
5011 let worktree_id = project
5012 .update(cx, |project, cx| {
5013 project.find_or_create_local_worktree("/the-root", true, cx)
5014 })
5015 .await
5016 .unwrap()
5017 .0
5018 .read_with(cx, |tree, _| tree.id());
5019
5020 // Open a buffer without an associated language server.
5021 let toml_buffer = project
5022 .update(cx, |project, cx| {
5023 project.open_buffer((worktree_id, "Cargo.toml"), cx)
5024 })
5025 .await
5026 .unwrap();
5027
5028 // Open a buffer with an associated language server.
5029 let rust_buffer = project
5030 .update(cx, |project, cx| {
5031 project.open_buffer((worktree_id, "test.rs"), cx)
5032 })
5033 .await
5034 .unwrap();
5035
5036 // A server is started up, and it is notified about Rust files.
5037 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5038 assert_eq!(
5039 fake_rust_server
5040 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5041 .await
5042 .text_document,
5043 lsp::TextDocumentItem {
5044 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5045 version: 0,
5046 text: "const A: i32 = 1;".to_string(),
5047 language_id: Default::default()
5048 }
5049 );
5050
5051 // The buffer is configured based on the language server's capabilities.
5052 rust_buffer.read_with(cx, |buffer, _| {
5053 assert_eq!(
5054 buffer.completion_triggers(),
5055 &[".".to_string(), "::".to_string()]
5056 );
5057 });
5058 toml_buffer.read_with(cx, |buffer, _| {
5059 assert!(buffer.completion_triggers().is_empty());
5060 });
5061
5062 // Edit a buffer. The changes are reported to the language server.
5063 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
5064 assert_eq!(
5065 fake_rust_server
5066 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5067 .await
5068 .text_document,
5069 lsp::VersionedTextDocumentIdentifier::new(
5070 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5071 1
5072 )
5073 );
5074
5075 // Open a third buffer with a different associated language server.
5076 let json_buffer = project
5077 .update(cx, |project, cx| {
5078 project.open_buffer((worktree_id, "package.json"), cx)
5079 })
5080 .await
5081 .unwrap();
5082
        // A JSON language server is started up and is only notified about the JSON buffer.
5084 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5085 assert_eq!(
5086 fake_json_server
5087 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5088 .await
5089 .text_document,
5090 lsp::TextDocumentItem {
5091 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5092 version: 0,
5093 text: "{\"a\": 1}".to_string(),
5094 language_id: Default::default()
5095 }
5096 );
5097
5098 // This buffer is configured based on the second language server's
5099 // capabilities.
5100 json_buffer.read_with(cx, |buffer, _| {
5101 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5102 });
5103
5104 // When opening another buffer whose language server is already running,
5105 // it is also configured based on the existing language server's capabilities.
5106 let rust_buffer2 = project
5107 .update(cx, |project, cx| {
5108 project.open_buffer((worktree_id, "test2.rs"), cx)
5109 })
5110 .await
5111 .unwrap();
5112 rust_buffer2.read_with(cx, |buffer, _| {
5113 assert_eq!(
5114 buffer.completion_triggers(),
5115 &[".".to_string(), "::".to_string()]
5116 );
5117 });
5118
5119 // Changes are reported only to servers matching the buffer's language.
5120 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
5121 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
5122 assert_eq!(
5123 fake_rust_server
5124 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5125 .await
5126 .text_document,
5127 lsp::VersionedTextDocumentIdentifier::new(
5128 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5129 1
5130 )
5131 );
5132
5133 // Save notifications are reported to all servers.
5134 toml_buffer
5135 .update(cx, |buffer, cx| buffer.save(cx))
5136 .await
5137 .unwrap();
5138 assert_eq!(
5139 fake_rust_server
5140 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5141 .await
5142 .text_document,
5143 lsp::TextDocumentIdentifier::new(
5144 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5145 )
5146 );
5147 assert_eq!(
5148 fake_json_server
5149 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5150 .await
5151 .text_document,
5152 lsp::TextDocumentIdentifier::new(
5153 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5154 )
5155 );
5156
5157 // Renames are reported only to servers matching the buffer's language.
5158 fs.rename(
5159 Path::new("/the-root/test2.rs"),
5160 Path::new("/the-root/test3.rs"),
5161 Default::default(),
5162 )
5163 .await
5164 .unwrap();
5165 assert_eq!(
5166 fake_rust_server
5167 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5168 .await
5169 .text_document,
5170 lsp::TextDocumentIdentifier::new(
5171 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5172 ),
5173 );
5174 assert_eq!(
5175 fake_rust_server
5176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5177 .await
5178 .text_document,
5179 lsp::TextDocumentItem {
5180 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5181 version: 0,
5182 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5183 language_id: Default::default()
5184 },
5185 );
5186
5187 rust_buffer2.update(cx, |buffer, cx| {
5188 buffer.update_diagnostics(
5189 DiagnosticSet::from_sorted_entries(
5190 vec![DiagnosticEntry {
5191 diagnostic: Default::default(),
5192 range: Anchor::MIN..Anchor::MAX,
5193 }],
5194 &buffer.snapshot(),
5195 ),
5196 cx,
5197 );
5198 assert_eq!(
5199 buffer
5200 .snapshot()
5201 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5202 .count(),
5203 1
5204 );
5205 });
5206
5207 // When the rename changes the extension of the file, the buffer gets closed on the old
5208 // language server and gets opened on the new one.
5209 fs.rename(
5210 Path::new("/the-root/test3.rs"),
5211 Path::new("/the-root/test3.json"),
5212 Default::default(),
5213 )
5214 .await
5215 .unwrap();
5216 assert_eq!(
5217 fake_rust_server
5218 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5219 .await
5220 .text_document,
5221 lsp::TextDocumentIdentifier::new(
5222 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5223 ),
5224 );
5225 assert_eq!(
5226 fake_json_server
5227 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5228 .await
5229 .text_document,
5230 lsp::TextDocumentItem {
5231 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5232 version: 0,
5233 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5234 language_id: Default::default()
5235 },
5236 );
5237 // We clear the diagnostics, since the language has changed.
5238 rust_buffer2.read_with(cx, |buffer, _| {
5239 assert_eq!(
5240 buffer
5241 .snapshot()
5242 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5243 .count(),
5244 0
5245 );
5246 });
5247
5248 // The renamed file's version resets after changing language server.
5249 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "// ", cx));
5250 assert_eq!(
5251 fake_json_server
5252 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5253 .await
5254 .text_document,
5255 lsp::VersionedTextDocumentIdentifier::new(
5256 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5257 1
5258 )
5259 );
5260
5261 // Restart language servers
5262 project.update(cx, |project, cx| {
5263 project.restart_language_servers_for_buffers(
5264 vec![rust_buffer.clone(), json_buffer.clone()],
5265 cx,
5266 );
5267 });
5268
5269 let mut rust_shutdown_requests = fake_rust_server
5270 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5271 let mut json_shutdown_requests = fake_json_server
5272 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5273 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5274
5275 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5276 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5277
        // Ensure the Rust document is reopened in the new Rust language server.
5279 assert_eq!(
5280 fake_rust_server
5281 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5282 .await
5283 .text_document,
5284 lsp::TextDocumentItem {
5285 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5286 version: 1,
5287 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5288 language_id: Default::default()
5289 }
5290 );
5291
        // Ensure the JSON documents are reopened in the new JSON language server.
5293 assert_set_eq!(
5294 [
5295 fake_json_server
5296 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5297 .await
5298 .text_document,
5299 fake_json_server
5300 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5301 .await
5302 .text_document,
5303 ],
5304 [
5305 lsp::TextDocumentItem {
5306 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5307 version: 0,
5308 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5309 language_id: Default::default()
5310 },
5311 lsp::TextDocumentItem {
5312 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5313 version: 1,
5314 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5315 language_id: Default::default()
5316 }
5317 ]
5318 );
5319
5320 // Close notifications are reported only to servers matching the buffer's language.
5321 cx.update(|_| drop(json_buffer));
5322 let close_message = lsp::DidCloseTextDocumentParams {
5323 text_document: lsp::TextDocumentIdentifier::new(
5324 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5325 ),
5326 };
5327 assert_eq!(
5328 fake_json_server
5329 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5330 .await,
5331 close_message,
5332 );
5333 }
5334
5335 #[gpui::test]
5336 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5337 cx.foreground().forbid_parking();
5338
5339 let fs = FakeFs::new(cx.background());
5340 fs.insert_tree(
5341 "/dir",
5342 json!({
5343 "a.rs": "let a = 1;",
5344 "b.rs": "let b = 2;"
5345 }),
5346 )
5347 .await;
5348
5349 let project = Project::test(fs, cx);
5350 let worktree_a_id = project
5351 .update(cx, |project, cx| {
5352 project.find_or_create_local_worktree("/dir/a.rs", true, cx)
5353 })
5354 .await
5355 .unwrap()
5356 .0
5357 .read_with(cx, |tree, _| tree.id());
5358 let worktree_b_id = project
5359 .update(cx, |project, cx| {
5360 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5361 })
5362 .await
5363 .unwrap()
5364 .0
5365 .read_with(cx, |tree, _| tree.id());
5366
5367 let buffer_a = project
5368 .update(cx, |project, cx| {
5369 project.open_buffer((worktree_a_id, ""), cx)
5370 })
5371 .await
5372 .unwrap();
5373 let buffer_b = project
5374 .update(cx, |project, cx| {
5375 project.open_buffer((worktree_b_id, ""), cx)
5376 })
5377 .await
5378 .unwrap();
5379
5380 project.update(cx, |project, cx| {
5381 project
5382 .update_diagnostics(
5383 lsp::PublishDiagnosticsParams {
5384 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5385 version: None,
5386 diagnostics: vec![lsp::Diagnostic {
5387 range: lsp::Range::new(
5388 lsp::Position::new(0, 4),
5389 lsp::Position::new(0, 5),
5390 ),
5391 severity: Some(lsp::DiagnosticSeverity::ERROR),
5392 message: "error 1".to_string(),
5393 ..Default::default()
5394 }],
5395 },
5396 &[],
5397 cx,
5398 )
5399 .unwrap();
5400 project
5401 .update_diagnostics(
5402 lsp::PublishDiagnosticsParams {
5403 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5404 version: None,
5405 diagnostics: vec![lsp::Diagnostic {
5406 range: lsp::Range::new(
5407 lsp::Position::new(0, 4),
5408 lsp::Position::new(0, 5),
5409 ),
5410 severity: Some(lsp::DiagnosticSeverity::WARNING),
5411 message: "error 2".to_string(),
5412 ..Default::default()
5413 }],
5414 },
5415 &[],
5416 cx,
5417 )
5418 .unwrap();
5419 });
5420
5421 buffer_a.read_with(cx, |buffer, _| {
5422 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5423 assert_eq!(
5424 chunks
5425 .iter()
5426 .map(|(s, d)| (s.as_str(), *d))
5427 .collect::<Vec<_>>(),
5428 &[
5429 ("let ", None),
5430 ("a", Some(DiagnosticSeverity::ERROR)),
5431 (" = 1;", None),
5432 ]
5433 );
5434 });
5435 buffer_b.read_with(cx, |buffer, _| {
5436 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5437 assert_eq!(
5438 chunks
5439 .iter()
5440 .map(|(s, d)| (s.as_str(), *d))
5441 .collect::<Vec<_>>(),
5442 &[
5443 ("let ", None),
5444 ("b", Some(DiagnosticSeverity::WARNING)),
5445 (" = 2;", None),
5446 ]
5447 );
5448 });
5449 }
5450
5451 #[gpui::test]
5452 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5453 cx.foreground().forbid_parking();
5454
5455 let progress_token = "the-progress-token";
5456 let mut language = Language::new(
5457 LanguageConfig {
5458 name: "Rust".into(),
5459 path_suffixes: vec!["rs".to_string()],
5460 ..Default::default()
5461 },
5462 Some(tree_sitter_rust::language()),
5463 );
5464 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5465 disk_based_diagnostics_progress_token: Some(progress_token),
5466 disk_based_diagnostics_sources: &["disk"],
5467 ..Default::default()
5468 });
5469
5470 let fs = FakeFs::new(cx.background());
5471 fs.insert_tree(
5472 "/dir",
5473 json!({
5474 "a.rs": "fn a() { A }",
5475 "b.rs": "const y: i32 = 1",
5476 }),
5477 )
5478 .await;
5479
5480 let project = Project::test(fs, cx);
5481 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5482
5483 let (tree, _) = project
5484 .update(cx, |project, cx| {
5485 project.find_or_create_local_worktree("/dir", true, cx)
5486 })
5487 .await
5488 .unwrap();
5489 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5490
5491 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5492 .await;
5493
        // Cause the worktree to start the fake language server.
5495 let _buffer = project
5496 .update(cx, |project, cx| {
5497 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
5498 })
5499 .await
5500 .unwrap();
5501
5502 let mut events = subscribe(&project, cx);
5503
5504 let mut fake_server = fake_servers.next().await.unwrap();
5505 fake_server.start_progress(progress_token).await;
5506 assert_eq!(
5507 events.next().await.unwrap(),
5508 Event::DiskBasedDiagnosticsStarted
5509 );
5510
5511 fake_server.start_progress(progress_token).await;
5512 fake_server.end_progress(progress_token).await;
5513 fake_server.start_progress(progress_token).await;
5514
5515 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5516 lsp::PublishDiagnosticsParams {
5517 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5518 version: None,
5519 diagnostics: vec![lsp::Diagnostic {
5520 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5521 severity: Some(lsp::DiagnosticSeverity::ERROR),
5522 message: "undefined variable 'A'".to_string(),
5523 ..Default::default()
5524 }],
5525 },
5526 );
5527 assert_eq!(
5528 events.next().await.unwrap(),
5529 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5530 );
5531
5532 fake_server.end_progress(progress_token).await;
5533 fake_server.end_progress(progress_token).await;
5534 assert_eq!(
5535 events.next().await.unwrap(),
5536 Event::DiskBasedDiagnosticsUpdated
5537 );
5538 assert_eq!(
5539 events.next().await.unwrap(),
5540 Event::DiskBasedDiagnosticsFinished
5541 );
5542
5543 let buffer = project
5544 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
5545 .await
5546 .unwrap();
5547
5548 buffer.read_with(cx, |buffer, _| {
5549 let snapshot = buffer.snapshot();
5550 let diagnostics = snapshot
5551 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5552 .collect::<Vec<_>>();
5553 assert_eq!(
5554 diagnostics,
5555 &[DiagnosticEntry {
5556 range: Point::new(0, 9)..Point::new(0, 10),
5557 diagnostic: Diagnostic {
5558 severity: lsp::DiagnosticSeverity::ERROR,
5559 message: "undefined variable 'A'".to_string(),
5560 group_id: 0,
5561 is_primary: true,
5562 ..Default::default()
5563 }
5564 }]
5565 )
5566 });
5567 }
5568
5569 #[gpui::test]
5570 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5571 cx.foreground().forbid_parking();
5572
5573 let progress_token = "the-progress-token";
5574 let mut language = Language::new(
5575 LanguageConfig {
5576 path_suffixes: vec!["rs".to_string()],
5577 ..Default::default()
5578 },
5579 None,
5580 );
5581 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5582 disk_based_diagnostics_sources: &["disk"],
5583 disk_based_diagnostics_progress_token: Some(progress_token),
5584 ..Default::default()
5585 });
5586
5587 let fs = FakeFs::new(cx.background());
5588 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5589
5590 let project = Project::test(fs, cx);
5591 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5592
5593 let worktree_id = project
5594 .update(cx, |project, cx| {
5595 project.find_or_create_local_worktree("/dir", true, cx)
5596 })
5597 .await
5598 .unwrap()
5599 .0
5600 .read_with(cx, |tree, _| tree.id());
5601
5602 let buffer = project
5603 .update(cx, |project, cx| {
5604 project.open_buffer((worktree_id, "a.rs"), cx)
5605 })
5606 .await
5607 .unwrap();
5608
5609 // Simulate diagnostics starting to update.
5610 let mut fake_server = fake_servers.next().await.unwrap();
5611 fake_server.start_progress(progress_token).await;
5612
5613 // Restart the server before the diagnostics finish updating.
5614 project.update(cx, |project, cx| {
5615 project.restart_language_servers_for_buffers([buffer], cx);
5616 });
5617 let mut events = subscribe(&project, cx);
5618
5619 // Simulate the newly started server sending more diagnostics.
5620 let mut fake_server = fake_servers.next().await.unwrap();
5621 fake_server.start_progress(progress_token).await;
5622 assert_eq!(
5623 events.next().await.unwrap(),
5624 Event::DiskBasedDiagnosticsStarted
5625 );
5626
5627 // All diagnostics are considered done, despite the old server's diagnostic
5628 // task never completing.
5629 fake_server.end_progress(progress_token).await;
5630 assert_eq!(
5631 events.next().await.unwrap(),
5632 Event::DiskBasedDiagnosticsUpdated
5633 );
5634 assert_eq!(
5635 events.next().await.unwrap(),
5636 Event::DiskBasedDiagnosticsFinished
5637 );
5638 project.read_with(cx, |project, _| {
5639 assert!(!project.is_running_disk_based_diagnostics());
5640 });
5641 }
5642
5643 #[gpui::test]
5644 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5645 cx.foreground().forbid_parking();
5646
5647 let mut language = Language::new(
5648 LanguageConfig {
5649 name: "Rust".into(),
5650 path_suffixes: vec!["rs".to_string()],
5651 ..Default::default()
5652 },
5653 Some(tree_sitter_rust::language()),
5654 );
5655 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5656 disk_based_diagnostics_sources: &["disk"],
5657 ..Default::default()
5658 });
5659
5660 let text = "
5661 fn a() { A }
5662 fn b() { BB }
5663 fn c() { CCC }
5664 "
5665 .unindent();
5666
5667 let fs = FakeFs::new(cx.background());
5668 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5669
5670 let project = Project::test(fs, cx);
5671 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5672
5673 let worktree_id = project
5674 .update(cx, |project, cx| {
5675 project.find_or_create_local_worktree("/dir", true, cx)
5676 })
5677 .await
5678 .unwrap()
5679 .0
5680 .read_with(cx, |tree, _| tree.id());
5681
5682 let buffer = project
5683 .update(cx, |project, cx| {
5684 project.open_buffer((worktree_id, "a.rs"), cx)
5685 })
5686 .await
5687 .unwrap();
5688
5689 let mut fake_server = fake_servers.next().await.unwrap();
5690 let open_notification = fake_server
5691 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5692 .await;
5693
5694 // Edit the buffer, moving the content down
5695 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
5696 let change_notification_1 = fake_server
5697 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5698 .await;
5699 assert!(
5700 change_notification_1.text_document.version > open_notification.text_document.version
5701 );
5702
5703 // Report some diagnostics for the initial version of the buffer
5704 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5705 lsp::PublishDiagnosticsParams {
5706 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5707 version: Some(open_notification.text_document.version),
5708 diagnostics: vec![
5709 lsp::Diagnostic {
5710 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5711 severity: Some(DiagnosticSeverity::ERROR),
5712 message: "undefined variable 'A'".to_string(),
5713 source: Some("disk".to_string()),
5714 ..Default::default()
5715 },
5716 lsp::Diagnostic {
5717 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5718 severity: Some(DiagnosticSeverity::ERROR),
5719 message: "undefined variable 'BB'".to_string(),
5720 source: Some("disk".to_string()),
5721 ..Default::default()
5722 },
5723 lsp::Diagnostic {
5724 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5725 severity: Some(DiagnosticSeverity::ERROR),
5726 source: Some("disk".to_string()),
5727 message: "undefined variable 'CCC'".to_string(),
5728 ..Default::default()
5729 },
5730 ],
5731 },
5732 );
5733
5734 // The diagnostics have moved down since they were created.
5735 buffer.next_notification(cx).await;
5736 buffer.read_with(cx, |buffer, _| {
5737 assert_eq!(
5738 buffer
5739 .snapshot()
5740 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5741 .collect::<Vec<_>>(),
5742 &[
5743 DiagnosticEntry {
5744 range: Point::new(3, 9)..Point::new(3, 11),
5745 diagnostic: Diagnostic {
5746 severity: DiagnosticSeverity::ERROR,
5747 message: "undefined variable 'BB'".to_string(),
5748 is_disk_based: true,
5749 group_id: 1,
5750 is_primary: true,
5751 ..Default::default()
5752 },
5753 },
5754 DiagnosticEntry {
5755 range: Point::new(4, 9)..Point::new(4, 12),
5756 diagnostic: Diagnostic {
5757 severity: DiagnosticSeverity::ERROR,
5758 message: "undefined variable 'CCC'".to_string(),
5759 is_disk_based: true,
5760 group_id: 2,
5761 is_primary: true,
5762 ..Default::default()
5763 }
5764 }
5765 ]
5766 );
5767 assert_eq!(
5768 chunks_with_diagnostics(buffer, 0..buffer.len()),
5769 [
5770 ("\n\nfn a() { ".to_string(), None),
5771 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5772 (" }\nfn b() { ".to_string(), None),
5773 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5774 (" }\nfn c() { ".to_string(), None),
5775 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5776 (" }\n".to_string(), None),
5777 ]
5778 );
5779 assert_eq!(
5780 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5781 [
5782 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5783 (" }\nfn c() { ".to_string(), None),
5784 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5785 ]
5786 );
5787 });
5788
5789 // Ensure overlapping diagnostics are highlighted correctly.
5790 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5791 lsp::PublishDiagnosticsParams {
5792 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5793 version: Some(open_notification.text_document.version),
5794 diagnostics: vec![
5795 lsp::Diagnostic {
5796 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5797 severity: Some(DiagnosticSeverity::ERROR),
5798 message: "undefined variable 'A'".to_string(),
5799 source: Some("disk".to_string()),
5800 ..Default::default()
5801 },
5802 lsp::Diagnostic {
5803 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5804 severity: Some(DiagnosticSeverity::WARNING),
5805 message: "unreachable statement".to_string(),
5806 source: Some("disk".to_string()),
5807 ..Default::default()
5808 },
5809 ],
5810 },
5811 );
5812
5813 buffer.next_notification(cx).await;
5814 buffer.read_with(cx, |buffer, _| {
5815 assert_eq!(
5816 buffer
5817 .snapshot()
5818 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5819 .collect::<Vec<_>>(),
5820 &[
5821 DiagnosticEntry {
5822 range: Point::new(2, 9)..Point::new(2, 12),
5823 diagnostic: Diagnostic {
5824 severity: DiagnosticSeverity::WARNING,
5825 message: "unreachable statement".to_string(),
5826 is_disk_based: true,
5827 group_id: 1,
5828 is_primary: true,
5829 ..Default::default()
5830 }
5831 },
5832 DiagnosticEntry {
5833 range: Point::new(2, 9)..Point::new(2, 10),
5834 diagnostic: Diagnostic {
5835 severity: DiagnosticSeverity::ERROR,
5836 message: "undefined variable 'A'".to_string(),
5837 is_disk_based: true,
5838 group_id: 0,
5839 is_primary: true,
5840 ..Default::default()
5841 },
5842 }
5843 ]
5844 );
5845 assert_eq!(
5846 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5847 [
5848 ("fn a() { ".to_string(), None),
5849 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5850 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5851 ("\n".to_string(), None),
5852 ]
5853 );
5854 assert_eq!(
5855 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5856 [
5857 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5858 ("\n".to_string(), None),
5859 ]
5860 );
5861 });
5862
5863 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5864 // changes since the last save.
5865 buffer.update(cx, |buffer, cx| {
5866 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
5867 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
5868 buffer.edit(Some(Point::new(3, 10)..Point::new(3, 10)), "xxx", cx);
5869 });
5870 let change_notification_2 = fake_server
5871 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5872 .await;
5873 assert!(
5874 change_notification_2.text_document.version
5875 > change_notification_1.text_document.version
5876 );
5877
5878 // Handle out-of-order diagnostics
5879 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5880 lsp::PublishDiagnosticsParams {
5881 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5882 version: Some(change_notification_2.text_document.version),
5883 diagnostics: vec![
5884 lsp::Diagnostic {
5885 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5886 severity: Some(DiagnosticSeverity::ERROR),
5887 message: "undefined variable 'BB'".to_string(),
5888 source: Some("disk".to_string()),
5889 ..Default::default()
5890 },
5891 lsp::Diagnostic {
5892 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5893 severity: Some(DiagnosticSeverity::WARNING),
5894 message: "undefined variable 'A'".to_string(),
5895 source: Some("disk".to_string()),
5896 ..Default::default()
5897 },
5898 ],
5899 },
5900 );
5901
5902 buffer.next_notification(cx).await;
5903 buffer.read_with(cx, |buffer, _| {
5904 assert_eq!(
5905 buffer
5906 .snapshot()
5907 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5908 .collect::<Vec<_>>(),
5909 &[
5910 DiagnosticEntry {
5911 range: Point::new(2, 21)..Point::new(2, 22),
5912 diagnostic: Diagnostic {
5913 severity: DiagnosticSeverity::WARNING,
5914 message: "undefined variable 'A'".to_string(),
5915 is_disk_based: true,
5916 group_id: 1,
5917 is_primary: true,
5918 ..Default::default()
5919 }
5920 },
5921 DiagnosticEntry {
5922 range: Point::new(3, 9)..Point::new(3, 14),
5923 diagnostic: Diagnostic {
5924 severity: DiagnosticSeverity::ERROR,
5925 message: "undefined variable 'BB'".to_string(),
5926 is_disk_based: true,
5927 group_id: 0,
5928 is_primary: true,
5929 ..Default::default()
5930 },
5931 }
5932 ]
5933 );
5934 });
5935 }
5936
5937 #[gpui::test]
5938 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5939 cx.foreground().forbid_parking();
5940
5941 let text = concat!(
5942 "let one = ;\n", //
5943 "let two = \n",
5944 "let three = 3;\n",
5945 );
5946
5947 let fs = FakeFs::new(cx.background());
5948 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5949
5950 let project = Project::test(fs, cx);
5951 let worktree_id = project
5952 .update(cx, |project, cx| {
5953 project.find_or_create_local_worktree("/dir", true, cx)
5954 })
5955 .await
5956 .unwrap()
5957 .0
5958 .read_with(cx, |tree, _| tree.id());
5959
5960 let buffer = project
5961 .update(cx, |project, cx| {
5962 project.open_buffer((worktree_id, "a.rs"), cx)
5963 })
5964 .await
5965 .unwrap();
5966
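        // Manually record two empty-range diagnostics: one in the middle of a
        // line and one at the end of a line.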
5967 project.update(cx, |project, cx| {
5968 project
5969 .update_buffer_diagnostics(
5970 &buffer,
5971 vec![
5972 DiagnosticEntry {
5973 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5974 diagnostic: Diagnostic {
5975 severity: DiagnosticSeverity::ERROR,
5976 message: "syntax error 1".to_string(),
5977 ..Default::default()
5978 },
5979 },
5980 DiagnosticEntry {
5981 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5982 diagnostic: Diagnostic {
5983 severity: DiagnosticSeverity::ERROR,
5984 message: "syntax error 2".to_string(),
5985 ..Default::default()
5986 },
5987 },
5988 ],
5989 None,
5990 cx,
5991 )
5992 .unwrap();
5993 });
5994
5995 // An empty range is extended forward to include the following character.
5996 // At the end of a line, an empty range is extended backward to include
5997 // the preceding character.
5998 buffer.read_with(cx, |buffer, _| {
5999 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6000 assert_eq!(
6001 chunks
6002 .iter()
6003 .map(|(s, d)| (s.as_str(), *d))
6004 .collect::<Vec<_>>(),
6005 &[
6006 ("let one = ", None),
6007 (";", Some(DiagnosticSeverity::ERROR)),
6008 ("\nlet two =", None),
6009 (" ", Some(DiagnosticSeverity::ERROR)),
6010 ("\nlet three = 3;\n", None)
6011 ]
6012 );
6013 });
6014 }
6015
6016 #[gpui::test]
6017 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6018 cx.foreground().forbid_parking();
6019
6020 let mut language = Language::new(
6021 LanguageConfig {
6022 name: "Rust".into(),
6023 path_suffixes: vec!["rs".to_string()],
6024 ..Default::default()
6025 },
6026 Some(tree_sitter_rust::language()),
6027 );
6028 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6029
6030 let text = "
6031 fn a() {
6032 f1();
6033 }
6034 fn b() {
6035 f2();
6036 }
6037 fn c() {
6038 f3();
6039 }
6040 "
6041 .unindent();
6042
6043 let fs = FakeFs::new(cx.background());
6044 fs.insert_tree(
6045 "/dir",
6046 json!({
6047 "a.rs": text.clone(),
6048 }),
6049 )
6050 .await;
6051
6052 let project = Project::test(fs, cx);
6053 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6054
6055 let worktree_id = project
6056 .update(cx, |project, cx| {
6057 project.find_or_create_local_worktree("/dir", true, cx)
6058 })
6059 .await
6060 .unwrap()
6061 .0
6062 .read_with(cx, |tree, _| tree.id());
6063
6064 let buffer = project
6065 .update(cx, |project, cx| {
6066 project.open_buffer((worktree_id, "a.rs"), cx)
6067 })
6068 .await
6069 .unwrap();
6070
6071 let mut fake_server = fake_servers.next().await.unwrap();
6072 let lsp_document_version = fake_server
6073 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6074 .await
6075 .text_document
6076 .version;
6077
6078 // Simulate editing the buffer after the language server computes some edits.
6079 buffer.update(cx, |buffer, cx| {
6080 buffer.edit(
6081 [Point::new(0, 0)..Point::new(0, 0)],
6082 "// above first function\n",
6083 cx,
6084 );
6085 buffer.edit(
6086 [Point::new(2, 0)..Point::new(2, 0)],
6087 " // inside first function\n",
6088 cx,
6089 );
6090 buffer.edit(
6091 [Point::new(6, 4)..Point::new(6, 4)],
6092 "// inside second function ",
6093 cx,
6094 );
6095
6096 assert_eq!(
6097 buffer.text(),
6098 "
6099 // above first function
6100 fn a() {
6101 // inside first function
6102 f1();
6103 }
6104 fn b() {
6105 // inside second function f2();
6106 }
6107 fn c() {
6108 f3();
6109 }
6110 "
6111 .unindent()
6112 );
6113 });
6114
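        // Interpret the edits relative to the older document version that the
        // server saw, adjusting them for the buffer's subsequent edits.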
6115 let edits = project
6116 .update(cx, |project, cx| {
6117 project.edits_from_lsp(
6118 &buffer,
6119 vec![
6120 // replace body of first function
6121 lsp::TextEdit {
6122 range: lsp::Range::new(
6123 lsp::Position::new(0, 0),
6124 lsp::Position::new(3, 0),
6125 ),
6126 new_text: "
6127 fn a() {
6128 f10();
6129 }
6130 "
6131 .unindent(),
6132 },
6133 // edit inside second function
6134 lsp::TextEdit {
6135 range: lsp::Range::new(
6136 lsp::Position::new(4, 6),
6137 lsp::Position::new(4, 6),
6138 ),
6139 new_text: "00".into(),
6140 },
6141 // edit inside third function via two distinct edits
6142 lsp::TextEdit {
6143 range: lsp::Range::new(
6144 lsp::Position::new(7, 5),
6145 lsp::Position::new(7, 5),
6146 ),
6147 new_text: "4000".into(),
6148 },
6149 lsp::TextEdit {
6150 range: lsp::Range::new(
6151 lsp::Position::new(7, 5),
6152 lsp::Position::new(7, 6),
6153 ),
6154 new_text: "".into(),
6155 },
6156 ],
6157 Some(lsp_document_version),
6158 cx,
6159 )
6160 })
6161 .await
6162 .unwrap();
6163
6164 buffer.update(cx, |buffer, cx| {
6165 for (range, new_text) in edits {
6166 buffer.edit([range], new_text, cx);
6167 }
6168 assert_eq!(
6169 buffer.text(),
6170 "
6171 // above first function
6172 fn a() {
6173 // inside first function
6174 f10();
6175 }
6176 fn b() {
6177 // inside second function f200();
6178 }
6179 fn c() {
6180 f4000();
6181 }
6182 "
6183 .unindent()
6184 );
6185 });
6186 }
6187
6188 #[gpui::test]
6189 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6190 cx.foreground().forbid_parking();
6191
6192 let text = "
6193 use a::b;
6194 use a::c;
6195
6196 fn f() {
6197 b();
6198 c();
6199 }
6200 "
6201 .unindent();
6202
6203 let fs = FakeFs::new(cx.background());
6204 fs.insert_tree(
6205 "/dir",
6206 json!({
6207 "a.rs": text.clone(),
6208 }),
6209 )
6210 .await;
6211
6212 let project = Project::test(fs, cx);
6213 let worktree_id = project
6214 .update(cx, |project, cx| {
6215 project.find_or_create_local_worktree("/dir", true, cx)
6216 })
6217 .await
6218 .unwrap()
6219 .0
6220 .read_with(cx, |tree, _| tree.id());
6221
6222 let buffer = project
6223 .update(cx, |project, cx| {
6224 project.open_buffer((worktree_id, "a.rs"), cx)
6225 })
6226 .await
6227 .unwrap();
6228
6229 // Simulate the language server sending us a small edit in the form of a very large diff.
6230 // Rust-analyzer does this when performing a merge-imports code action.
6231 let edits = project
6232 .update(cx, |project, cx| {
6233 project.edits_from_lsp(
6234 &buffer,
6235 [
6236 // Replace the first use statement without editing the semicolon.
6237 lsp::TextEdit {
6238 range: lsp::Range::new(
6239 lsp::Position::new(0, 4),
6240 lsp::Position::new(0, 8),
6241 ),
6242 new_text: "a::{b, c}".into(),
6243 },
6244 // Reinsert the remainder of the file between the semicolon and the final
6245 // newline of the file.
6246 lsp::TextEdit {
6247 range: lsp::Range::new(
6248 lsp::Position::new(0, 9),
6249 lsp::Position::new(0, 9),
6250 ),
6251 new_text: "\n\n".into(),
6252 },
6253 lsp::TextEdit {
6254 range: lsp::Range::new(
6255 lsp::Position::new(0, 9),
6256 lsp::Position::new(0, 9),
6257 ),
6258 new_text: "
6259 fn f() {
6260 b();
6261 c();
6262 }"
6263 .unindent(),
6264 },
6265 // Delete everything after the first newline of the file.
6266 lsp::TextEdit {
6267 range: lsp::Range::new(
6268 lsp::Position::new(1, 0),
6269 lsp::Position::new(7, 0),
6270 ),
6271 new_text: "".into(),
6272 },
6273 ],
6274 None,
6275 cx,
6276 )
6277 })
6278 .await
6279 .unwrap();
6280
6281 buffer.update(cx, |buffer, cx| {
6282 let edits = edits
6283 .into_iter()
6284 .map(|(range, text)| {
6285 (
6286 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6287 text,
6288 )
6289 })
6290 .collect::<Vec<_>>();
6291
6292 assert_eq!(
6293 edits,
6294 [
6295 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6296 (Point::new(1, 0)..Point::new(2, 0), "".into())
6297 ]
6298 );
6299
6300 for (range, new_text) in edits {
6301 buffer.edit([range], new_text, cx);
6302 }
6303 assert_eq!(
6304 buffer.text(),
6305 "
6306 use a::{b, c};
6307
6308 fn f() {
6309 b();
6310 c();
6311 }
6312 "
6313 .unindent()
6314 );
6315 });
6316 }
6317
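    // Collects the text of `range` as (text, severity) pairs, merging adjacent
    // chunks that share the same diagnostic severity so tests can assert on the
    // highlighted regions directly.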
6318 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6319 buffer: &Buffer,
6320 range: Range<T>,
6321 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6322 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6323 for chunk in buffer.snapshot().chunks(range, true) {
6324 if chunks.last().map_or(false, |prev_chunk| {
6325 prev_chunk.1 == chunk.diagnostic_severity
6326 }) {
6327 chunks.last_mut().unwrap().0.push_str(chunk.text);
6328 } else {
6329 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6330 }
6331 }
6332 chunks
6333 }
6334
6335 #[gpui::test]
6336 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6337 let dir = temp_tree(json!({
6338 "root": {
6339 "dir1": {},
6340 "dir2": {
6341 "dir3": {}
6342 }
6343 }
6344 }));
6345
6346 let project = Project::test(Arc::new(RealFs), cx);
6347 let (tree, _) = project
6348 .update(cx, |project, cx| {
6349 project.find_or_create_local_worktree(&dir.path(), true, cx)
6350 })
6351 .await
6352 .unwrap();
6353
6354 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6355 .await;
6356
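        // Fuzzy path matching returns no results, since the worktree contains
        // only directories and no files.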
6357 let cancel_flag = Default::default();
6358 let results = project
6359 .read_with(cx, |project, cx| {
6360 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6361 })
6362 .await;
6363
6364 assert!(results.is_empty());
6365 }
6366
6367 #[gpui::test]
6368 async fn test_definition(cx: &mut gpui::TestAppContext) {
6369 let mut language = Language::new(
6370 LanguageConfig {
6371 name: "Rust".into(),
6372 path_suffixes: vec!["rs".to_string()],
6373 ..Default::default()
6374 },
6375 Some(tree_sitter_rust::language()),
6376 );
6377 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6378
6379 let fs = FakeFs::new(cx.background());
6380 fs.insert_tree(
6381 "/dir",
6382 json!({
6383 "a.rs": "const fn a() { A }",
6384 "b.rs": "const y: i32 = crate::a()",
6385 }),
6386 )
6387 .await;
6388
6389 let project = Project::test(fs, cx);
6390 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6391
6392 let (tree, _) = project
6393 .update(cx, |project, cx| {
6394 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
6395 })
6396 .await
6397 .unwrap();
6398 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6399 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6400 .await;
6401
6402 let buffer = project
6403 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6404 .await
6405 .unwrap();
6406
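        // Respond to the definition request with a location in a file outside
        // of the open worktree.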
6407 let fake_server = fake_servers.next().await.unwrap();
6408 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6409 let params = params.text_document_position_params;
6410 assert_eq!(
6411 params.text_document.uri.to_file_path().unwrap(),
6412 Path::new("/dir/b.rs"),
6413 );
6414 assert_eq!(params.position, lsp::Position::new(0, 22));
6415
6416 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6417 lsp::Location::new(
6418 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6419 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6420 ),
6421 )))
6422 });
6423
6424 let mut definitions = project
6425 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6426 .await
6427 .unwrap();
6428
6429 assert_eq!(definitions.len(), 1);
6430 let definition = definitions.pop().unwrap();
6431 cx.update(|cx| {
6432 let target_buffer = definition.buffer.read(cx);
6433 assert_eq!(
6434 target_buffer
6435 .file()
6436 .unwrap()
6437 .as_local()
6438 .unwrap()
6439 .abs_path(cx),
6440 Path::new("/dir/a.rs"),
6441 );
6442 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
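            // The definition's target file was added to the project as an
            // invisible worktree.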
6443 assert_eq!(
6444 list_worktrees(&project, cx),
6445 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6446 );
6447
6448 drop(definition);
6449 });
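        // After the definition is dropped, the invisible worktree is removed
        // from the project.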
6450 cx.read(|cx| {
6451 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6452 });
6453
6454 fn list_worktrees<'a>(
6455 project: &'a ModelHandle<Project>,
6456 cx: &'a AppContext,
6457 ) -> Vec<(&'a Path, bool)> {
6458 project
6459 .read(cx)
6460 .worktrees(cx)
6461 .map(|worktree| {
6462 let worktree = worktree.read(cx);
6463 (
6464 worktree.as_local().unwrap().abs_path().as_ref(),
6465 worktree.is_visible(),
6466 )
6467 })
6468 .collect::<Vec<_>>()
6469 }
6470 }
6471
6472 #[gpui::test(iterations = 10)]
6473 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6474 let mut language = Language::new(
6475 LanguageConfig {
6476 name: "TypeScript".into(),
6477 path_suffixes: vec!["ts".to_string()],
6478 ..Default::default()
6479 },
6480 None,
6481 );
6482 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6483
6484 let fs = FakeFs::new(cx.background());
6485 fs.insert_tree(
6486 "/dir",
6487 json!({
6488 "a.ts": "a",
6489 }),
6490 )
6491 .await;
6492
6493 let project = Project::test(fs, cx);
6494 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6495
6496 let (tree, _) = project
6497 .update(cx, |project, cx| {
6498 project.find_or_create_local_worktree("/dir", true, cx)
6499 })
6500 .await
6501 .unwrap();
6502 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6503 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6504 .await;
6505
6506 let buffer = project
6507 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
6508 .await
6509 .unwrap();
6510
6511 let fake_server = fake_language_servers.next().await.unwrap();
6512
        // The language server returns code actions that contain commands but no edits.
6514 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6515 fake_server
6516 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6517 Ok(Some(vec![
6518 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6519 title: "The code action".into(),
6520 command: Some(lsp::Command {
6521 title: "The command".into(),
6522 command: "_the/command".into(),
6523 arguments: Some(vec![json!("the-argument")]),
6524 }),
6525 ..Default::default()
6526 }),
6527 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6528 title: "two".into(),
6529 ..Default::default()
6530 }),
6531 ]))
6532 })
6533 .next()
6534 .await;
6535
6536 let action = actions.await.unwrap()[0].clone();
6537 let apply = project.update(cx, |project, cx| {
6538 project.apply_code_action(buffer.clone(), action, true, cx)
6539 });
6540
        // Resolving the code action does not populate its edits. In the absence
        // of edits, we must execute the given command.
6543 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6544 |action, _| async move { Ok(action) },
6545 );
6546
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6549 fake_server
6550 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6551 let fake = fake_server.clone();
6552 move |params, _| {
6553 assert_eq!(params.command, "_the/command");
6554 let fake = fake.clone();
6555 async move {
6556 fake.server
6557 .request::<lsp::request::ApplyWorkspaceEdit>(
6558 lsp::ApplyWorkspaceEditParams {
6559 label: None,
6560 edit: lsp::WorkspaceEdit {
6561 changes: Some(
6562 [(
6563 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6564 vec![lsp::TextEdit {
6565 range: lsp::Range::new(
6566 lsp::Position::new(0, 0),
6567 lsp::Position::new(0, 0),
6568 ),
6569 new_text: "X".into(),
6570 }],
6571 )]
6572 .into_iter()
6573 .collect(),
6574 ),
6575 ..Default::default()
6576 },
6577 },
6578 )
6579 .await
6580 .unwrap();
6581 Ok(Some(json!(null)))
6582 }
6583 }
6584 })
6585 .next()
6586 .await;
6587
        // Applying the code action returns a project transaction containing the
        // edits sent by the language server in its `workspace/applyEdit` request.
6590 let transaction = apply.await.unwrap();
6591 assert!(transaction.0.contains_key(&buffer));
6592 buffer.update(cx, |buffer, cx| {
6593 assert_eq!(buffer.text(), "Xa");
6594 buffer.undo(cx);
6595 assert_eq!(buffer.text(), "a");
6596 });
6597 }
6598
6599 #[gpui::test]
6600 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6601 let fs = FakeFs::new(cx.background());
6602 fs.insert_tree(
6603 "/dir",
6604 json!({
6605 "file1": "the old contents",
6606 }),
6607 )
6608 .await;
6609
6610 let project = Project::test(fs.clone(), cx);
6611 let worktree_id = project
6612 .update(cx, |p, cx| {
6613 p.find_or_create_local_worktree("/dir", true, cx)
6614 })
6615 .await
6616 .unwrap()
6617 .0
6618 .read_with(cx, |tree, _| tree.id());
6619
6620 let buffer = project
6621 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6622 .await
6623 .unwrap();
6624 buffer
6625 .update(cx, |buffer, cx| {
6626 assert_eq!(buffer.text(), "the old contents");
6627 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6628 buffer.save(cx)
6629 })
6630 .await
6631 .unwrap();
6632
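        // The contents written to disk match the buffer's contents.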
6633 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6634 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6635 }
6636
6637 #[gpui::test]
6638 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6639 let fs = FakeFs::new(cx.background());
6640 fs.insert_tree(
6641 "/dir",
6642 json!({
6643 "file1": "the old contents",
6644 }),
6645 )
6646 .await;
6647
6648 let project = Project::test(fs.clone(), cx);
6649 let worktree_id = project
6650 .update(cx, |p, cx| {
6651 p.find_or_create_local_worktree("/dir/file1", true, cx)
6652 })
6653 .await
6654 .unwrap()
6655 .0
6656 .read_with(cx, |tree, _| tree.id());
6657
6658 let buffer = project
6659 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
6660 .await
6661 .unwrap();
6662 buffer
6663 .update(cx, |buffer, cx| {
6664 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
6665 buffer.save(cx)
6666 })
6667 .await
6668 .unwrap();
6669
6670 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6671 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6672 }
6673
6674 #[gpui::test]
6675 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6676 let fs = FakeFs::new(cx.background());
6677 fs.insert_tree("/dir", json!({})).await;
6678
6679 let project = Project::test(fs.clone(), cx);
6680 let (worktree, _) = project
6681 .update(cx, |project, cx| {
6682 project.find_or_create_local_worktree("/dir", true, cx)
6683 })
6684 .await
6685 .unwrap();
6686 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6687
6688 let buffer = project.update(cx, |project, cx| {
6689 project.create_buffer("", None, cx).unwrap()
6690 });
6691 buffer.update(cx, |buffer, cx| {
6692 buffer.edit([0..0], "abc", cx);
6693 assert!(buffer.is_dirty());
6694 assert!(!buffer.has_conflict());
6695 });
6696 project
6697 .update(cx, |project, cx| {
6698 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6699 })
6700 .await
6701 .unwrap();
6702 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6703 buffer.read_with(cx, |buffer, cx| {
6704 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6705 assert!(!buffer.is_dirty());
6706 assert!(!buffer.has_conflict());
6707 });
6708
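        // Opening the file via its newly assigned path returns the same buffer.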
6709 let opened_buffer = project
6710 .update(cx, |project, cx| {
6711 project.open_buffer((worktree_id, "file1"), cx)
6712 })
6713 .await
6714 .unwrap();
6715 assert_eq!(opened_buffer, buffer);
6716 }
6717
6718 #[gpui::test(retries = 5)]
6719 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6720 let dir = temp_tree(json!({
6721 "a": {
6722 "file1": "",
6723 "file2": "",
6724 "file3": "",
6725 },
6726 "b": {
6727 "c": {
6728 "file4": "",
6729 "file5": "",
6730 }
6731 }
6732 }));
6733
6734 let project = Project::test(Arc::new(RealFs), cx);
6735 let rpc = project.read_with(cx, |p, _| p.client.clone());
6736
6737 let (tree, _) = project
6738 .update(cx, |p, cx| {
6739 p.find_or_create_local_worktree(dir.path(), true, cx)
6740 })
6741 .await
6742 .unwrap();
6743 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6744
6745 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6746 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
6747 async move { buffer.await.unwrap() }
6748 };
6749 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6750 tree.read_with(cx, |tree, _| {
6751 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
6753 .id
6754 })
6755 };
6756
6757 let buffer2 = buffer_for_path("a/file2", cx).await;
6758 let buffer3 = buffer_for_path("a/file3", cx).await;
6759 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6760 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6761
6762 let file2_id = id_for_path("a/file2", &cx);
6763 let file3_id = id_for_path("a/file3", &cx);
6764 let file4_id = id_for_path("b/c/file4", &cx);
6765
6766 // Wait for the initial scan.
6767 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6768 .await;
6769
6770 // Create a remote copy of this worktree.
6771 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6772 let (remote, load_task) = cx.update(|cx| {
6773 Worktree::remote(
6774 1,
6775 1,
6776 initial_snapshot.to_proto(&Default::default(), true),
6777 rpc.clone(),
6778 cx,
6779 )
6780 });
6781 load_task.await;
6782
6783 cx.read(|cx| {
6784 assert!(!buffer2.read(cx).is_dirty());
6785 assert!(!buffer3.read(cx).is_dirty());
6786 assert!(!buffer4.read(cx).is_dirty());
6787 assert!(!buffer5.read(cx).is_dirty());
6788 });
6789
6790 // Rename and delete files and directories.
6791 tree.flush_fs_events(&cx).await;
6792 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6793 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6794 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6795 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6796 tree.flush_fs_events(&cx).await;
6797
6798 let expected_paths = vec![
6799 "a",
6800 "a/file1",
6801 "a/file2.new",
6802 "b",
6803 "d",
6804 "d/file3",
6805 "d/file4",
6806 ];
6807
6808 cx.read(|app| {
6809 assert_eq!(
6810 tree.read(app)
6811 .paths()
6812 .map(|p| p.to_str().unwrap())
6813 .collect::<Vec<_>>(),
6814 expected_paths
6815 );
6816
6817 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6818 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6819 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6820
6821 assert_eq!(
6822 buffer2.read(app).file().unwrap().path().as_ref(),
6823 Path::new("a/file2.new")
6824 );
6825 assert_eq!(
6826 buffer3.read(app).file().unwrap().path().as_ref(),
6827 Path::new("d/file3")
6828 );
6829 assert_eq!(
6830 buffer4.read(app).file().unwrap().path().as_ref(),
6831 Path::new("d/file4")
6832 );
6833 assert_eq!(
6834 buffer5.read(app).file().unwrap().path().as_ref(),
6835 Path::new("b/c/file5")
6836 );
6837
6838 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6839 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6840 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6841 assert!(buffer5.read(app).file().unwrap().is_deleted());
6842 });
6843
6844 // Update the remote worktree. Check that it becomes consistent with the
6845 // local worktree.
6846 remote.update(cx, |remote, cx| {
6847 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6848 &initial_snapshot,
6849 1,
6850 1,
6851 true,
6852 );
6853 remote
6854 .as_remote_mut()
6855 .unwrap()
6856 .snapshot
6857 .apply_remote_update(update_message)
6858 .unwrap();
6859
6860 assert_eq!(
6861 remote
6862 .paths()
6863 .map(|p| p.to_str().unwrap())
6864 .collect::<Vec<_>>(),
6865 expected_paths
6866 );
6867 });
6868 }
6869
6870 #[gpui::test]
6871 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6872 let fs = FakeFs::new(cx.background());
6873 fs.insert_tree(
6874 "/the-dir",
6875 json!({
6876 "a.txt": "a-contents",
6877 "b.txt": "b-contents",
6878 }),
6879 )
6880 .await;
6881
6882 let project = Project::test(fs.clone(), cx);
6883 let worktree_id = project
6884 .update(cx, |p, cx| {
6885 p.find_or_create_local_worktree("/the-dir", true, cx)
6886 })
6887 .await
6888 .unwrap()
6889 .0
6890 .read_with(cx, |tree, _| tree.id());
6891
6892 // Spawn multiple tasks to open paths, repeating some paths.
6893 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6894 (
6895 p.open_buffer((worktree_id, "a.txt"), cx),
6896 p.open_buffer((worktree_id, "b.txt"), cx),
6897 p.open_buffer((worktree_id, "a.txt"), cx),
6898 )
6899 });
6900
6901 let buffer_a_1 = buffer_a_1.await.unwrap();
6902 let buffer_a_2 = buffer_a_2.await.unwrap();
6903 let buffer_b = buffer_b.await.unwrap();
6904 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6905 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6906
6907 // There is only one buffer per path.
6908 let buffer_a_id = buffer_a_1.id();
6909 assert_eq!(buffer_a_2.id(), buffer_a_id);
6910
6911 // Open the same path again while it is still open.
6912 drop(buffer_a_1);
6913 let buffer_a_3 = project
6914 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
6915 .await
6916 .unwrap();
6917
6918 // There's still only one buffer per path.
6919 assert_eq!(buffer_a_3.id(), buffer_a_id);
6920 }
6921
6922 #[gpui::test]
6923 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6924 use std::fs;
6925
6926 let dir = temp_tree(json!({
6927 "file1": "abc",
6928 "file2": "def",
6929 "file3": "ghi",
6930 }));
6931
6932 let project = Project::test(Arc::new(RealFs), cx);
6933 let (worktree, _) = project
6934 .update(cx, |p, cx| {
6935 p.find_or_create_local_worktree(dir.path(), true, cx)
6936 })
6937 .await
6938 .unwrap();
6939 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
6940
6941 worktree.flush_fs_events(&cx).await;
6942 worktree
6943 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
6944 .await;
6945
6946 let buffer1 = project
6947 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
6948 .await
6949 .unwrap();
6950 let events = Rc::new(RefCell::new(Vec::new()));
6951
6952 // initially, the buffer isn't dirty.
6953 buffer1.update(cx, |buffer, cx| {
6954 cx.subscribe(&buffer1, {
6955 let events = events.clone();
6956 move |_, _, event, _| match event {
6957 BufferEvent::Operation(_) => {}
6958 _ => events.borrow_mut().push(event.clone()),
6959 }
6960 })
6961 .detach();
6962
6963 assert!(!buffer.is_dirty());
6964 assert!(events.borrow().is_empty());
6965
6966 buffer.edit(vec![1..2], "", cx);
6967 });
6968
6969 // after the first edit, the buffer is dirty, and emits a dirtied event.
6970 buffer1.update(cx, |buffer, cx| {
6971 assert!(buffer.text() == "ac");
6972 assert!(buffer.is_dirty());
6973 assert_eq!(
6974 *events.borrow(),
6975 &[language::Event::Edited, language::Event::Dirtied]
6976 );
6977 events.borrow_mut().clear();
6978 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6979 });
6980
6981 // after saving, the buffer is not dirty, and emits a saved event.
6982 buffer1.update(cx, |buffer, cx| {
6983 assert!(!buffer.is_dirty());
6984 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6985 events.borrow_mut().clear();
6986
6987 buffer.edit(vec![1..1], "B", cx);
6988 buffer.edit(vec![2..2], "D", cx);
6989 });
6990
    // after editing again, the buffer is dirty, and emits another dirtied event.
6992 buffer1.update(cx, |buffer, cx| {
6993 assert!(buffer.text() == "aBDc");
6994 assert!(buffer.is_dirty());
6995 assert_eq!(
6996 *events.borrow(),
6997 &[
6998 language::Event::Edited,
6999 language::Event::Dirtied,
7000 language::Event::Edited,
7001 ],
7002 );
7003 events.borrow_mut().clear();
7004
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7007 buffer.edit([1..3], "", cx);
7008 assert!(buffer.text() == "ac");
7009 assert!(buffer.is_dirty());
7010 });
7011
7012 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7013
7014 // When a file is deleted, the buffer is considered dirty.
7015 let events = Rc::new(RefCell::new(Vec::new()));
7016 let buffer2 = project
7017 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
7018 .await
7019 .unwrap();
7020 buffer2.update(cx, |_, cx| {
7021 cx.subscribe(&buffer2, {
7022 let events = events.clone();
7023 move |_, _, event, _| events.borrow_mut().push(event.clone())
7024 })
7025 .detach();
7026 });
7027
7028 fs::remove_file(dir.path().join("file2")).unwrap();
7029 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7030 assert_eq!(
7031 *events.borrow(),
7032 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7033 );
7034
7035 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7036 let events = Rc::new(RefCell::new(Vec::new()));
7037 let buffer3 = project
7038 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
7039 .await
7040 .unwrap();
7041 buffer3.update(cx, |_, cx| {
7042 cx.subscribe(&buffer3, {
7043 let events = events.clone();
7044 move |_, _, event, _| events.borrow_mut().push(event.clone())
7045 })
7046 .detach();
7047 });
7048
7049 worktree.flush_fs_events(&cx).await;
7050 buffer3.update(cx, |buffer, cx| {
7051 buffer.edit(Some(0..0), "x", cx);
7052 });
7053 events.borrow_mut().clear();
7054 fs::remove_file(dir.path().join("file3")).unwrap();
7055 buffer3
7056 .condition(&cx, |_, _| !events.borrow().is_empty())
7057 .await;
7058 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7059 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7060 }
7061
7062 #[gpui::test]
7063 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7064 use std::fs;
7065
7066 let initial_contents = "aaa\nbbbbb\nc\n";
7067 let dir = temp_tree(json!({ "the-file": initial_contents }));
7068
7069 let project = Project::test(Arc::new(RealFs), cx);
7070 let (worktree, _) = project
7071 .update(cx, |p, cx| {
7072 p.find_or_create_local_worktree(dir.path(), true, cx)
7073 })
7074 .await
7075 .unwrap();
7076 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7077
7078 worktree
7079 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
7080 .await;
7081
7082 let abs_path = dir.path().join("the-file");
7083 let buffer = project
7084 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
7085 .await
7086 .unwrap();
7087
7088 // TODO
7089 // Add a cursor on each row.
7090 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
7091 // assert!(!buffer.is_dirty());
7092 // buffer.add_selection_set(
7093 // &(0..3)
7094 // .map(|row| Selection {
7095 // id: row as usize,
7096 // start: Point::new(row, 1),
7097 // end: Point::new(row, 1),
7098 // reversed: false,
7099 // goal: SelectionGoal::None,
7100 // })
7101 // .collect::<Vec<_>>(),
7102 // cx,
7103 // )
7104 // });
7105
7106 // Change the file on disk, adding two new lines of text, and removing
7107 // one line.
7108 buffer.read_with(cx, |buffer, _| {
7109 assert!(!buffer.is_dirty());
7110 assert!(!buffer.has_conflict());
7111 });
7112 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7113 fs::write(&abs_path, new_contents).unwrap();
7114
7115 // Because the buffer was not modified, it is reloaded from disk. Its
7116 // contents are edited according to the diff between the old and new
7117 // file contents.
7118 buffer
7119 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7120 .await;
7121
7122 buffer.update(cx, |buffer, _| {
7123 assert_eq!(buffer.text(), new_contents);
7124 assert!(!buffer.is_dirty());
7125 assert!(!buffer.has_conflict());
7126
7127 // TODO
7128 // let cursor_positions = buffer
7129 // .selection_set(selection_set_id)
7130 // .unwrap()
7131 // .selections::<Point>(&*buffer)
7132 // .map(|selection| {
7133 // assert_eq!(selection.start, selection.end);
7134 // selection.start
7135 // })
7136 // .collect::<Vec<_>>();
7137 // assert_eq!(
7138 // cursor_positions,
7139 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7140 // );
7141 });
7142
7143 // Modify the buffer
7144 buffer.update(cx, |buffer, cx| {
7145 buffer.edit(vec![0..0], " ", cx);
7146 assert!(buffer.is_dirty());
7147 assert!(!buffer.has_conflict());
7148 });
7149
7150 // Change the file on disk again, adding blank lines to the beginning.
7151 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
7152
7153 // Because the buffer is modified, it doesn't reload from disk, but is
7154 // marked as having a conflict.
7155 buffer
7156 .condition(&cx, |buffer, _| buffer.has_conflict())
7157 .await;
7158 }
7159
7160 #[gpui::test]
7161 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7162 cx.foreground().forbid_parking();
7163
7164 let fs = FakeFs::new(cx.background());
7165 fs.insert_tree(
7166 "/the-dir",
7167 json!({
7168 "a.rs": "
7169 fn foo(mut v: Vec<usize>) {
7170 for x in &v {
7171 v.push(1);
7172 }
7173 }
7174 "
7175 .unindent(),
7176 }),
7177 )
7178 .await;
7179
7180 let project = Project::test(fs.clone(), cx);
7181 let (worktree, _) = project
7182 .update(cx, |p, cx| {
7183 p.find_or_create_local_worktree("/the-dir", true, cx)
7184 })
7185 .await
7186 .unwrap();
7187 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
7188
7189 let buffer = project
7190 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
7191 .await
7192 .unwrap();
7193
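        // Publish a set of diagnostics in which supporting hints reference their
        // primary diagnostics via related information.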
7194 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7195 let message = lsp::PublishDiagnosticsParams {
7196 uri: buffer_uri.clone(),
7197 diagnostics: vec![
7198 lsp::Diagnostic {
7199 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7200 severity: Some(DiagnosticSeverity::WARNING),
7201 message: "error 1".to_string(),
7202 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7203 location: lsp::Location {
7204 uri: buffer_uri.clone(),
7205 range: lsp::Range::new(
7206 lsp::Position::new(1, 8),
7207 lsp::Position::new(1, 9),
7208 ),
7209 },
7210 message: "error 1 hint 1".to_string(),
7211 }]),
7212 ..Default::default()
7213 },
7214 lsp::Diagnostic {
7215 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7216 severity: Some(DiagnosticSeverity::HINT),
7217 message: "error 1 hint 1".to_string(),
7218 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7219 location: lsp::Location {
7220 uri: buffer_uri.clone(),
7221 range: lsp::Range::new(
7222 lsp::Position::new(1, 8),
7223 lsp::Position::new(1, 9),
7224 ),
7225 },
7226 message: "original diagnostic".to_string(),
7227 }]),
7228 ..Default::default()
7229 },
7230 lsp::Diagnostic {
7231 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7232 severity: Some(DiagnosticSeverity::ERROR),
7233 message: "error 2".to_string(),
7234 related_information: Some(vec![
7235 lsp::DiagnosticRelatedInformation {
7236 location: lsp::Location {
7237 uri: buffer_uri.clone(),
7238 range: lsp::Range::new(
7239 lsp::Position::new(1, 13),
7240 lsp::Position::new(1, 15),
7241 ),
7242 },
7243 message: "error 2 hint 1".to_string(),
7244 },
7245 lsp::DiagnosticRelatedInformation {
7246 location: lsp::Location {
7247 uri: buffer_uri.clone(),
7248 range: lsp::Range::new(
7249 lsp::Position::new(1, 13),
7250 lsp::Position::new(1, 15),
7251 ),
7252 },
7253 message: "error 2 hint 2".to_string(),
7254 },
7255 ]),
7256 ..Default::default()
7257 },
7258 lsp::Diagnostic {
7259 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7260 severity: Some(DiagnosticSeverity::HINT),
7261 message: "error 2 hint 1".to_string(),
7262 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7263 location: lsp::Location {
7264 uri: buffer_uri.clone(),
7265 range: lsp::Range::new(
7266 lsp::Position::new(2, 8),
7267 lsp::Position::new(2, 17),
7268 ),
7269 },
7270 message: "original diagnostic".to_string(),
7271 }]),
7272 ..Default::default()
7273 },
7274 lsp::Diagnostic {
7275 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7276 severity: Some(DiagnosticSeverity::HINT),
7277 message: "error 2 hint 2".to_string(),
7278 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7279 location: lsp::Location {
7280 uri: buffer_uri.clone(),
7281 range: lsp::Range::new(
7282 lsp::Position::new(2, 8),
7283 lsp::Position::new(2, 17),
7284 ),
7285 },
7286 message: "original diagnostic".to_string(),
7287 }]),
7288 ..Default::default()
7289 },
7290 ],
7291 version: None,
7292 };
7293
7294 project
7295 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7296 .unwrap();
7297 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7298
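    // Diagnostics in the same group share a group id, and only the original
    // diagnostic in each group is marked as primary.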
7299 assert_eq!(
7300 buffer
7301 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7302 .collect::<Vec<_>>(),
7303 &[
7304 DiagnosticEntry {
7305 range: Point::new(1, 8)..Point::new(1, 9),
7306 diagnostic: Diagnostic {
7307 severity: DiagnosticSeverity::WARNING,
7308 message: "error 1".to_string(),
7309 group_id: 0,
7310 is_primary: true,
7311 ..Default::default()
7312 }
7313 },
7314 DiagnosticEntry {
7315 range: Point::new(1, 8)..Point::new(1, 9),
7316 diagnostic: Diagnostic {
7317 severity: DiagnosticSeverity::HINT,
7318 message: "error 1 hint 1".to_string(),
7319 group_id: 0,
7320 is_primary: false,
7321 ..Default::default()
7322 }
7323 },
7324 DiagnosticEntry {
7325 range: Point::new(1, 13)..Point::new(1, 15),
7326 diagnostic: Diagnostic {
7327 severity: DiagnosticSeverity::HINT,
7328 message: "error 2 hint 1".to_string(),
7329 group_id: 1,
7330 is_primary: false,
7331 ..Default::default()
7332 }
7333 },
7334 DiagnosticEntry {
7335 range: Point::new(1, 13)..Point::new(1, 15),
7336 diagnostic: Diagnostic {
7337 severity: DiagnosticSeverity::HINT,
7338 message: "error 2 hint 2".to_string(),
7339 group_id: 1,
7340 is_primary: false,
7341 ..Default::default()
7342 }
7343 },
7344 DiagnosticEntry {
7345 range: Point::new(2, 8)..Point::new(2, 17),
7346 diagnostic: Diagnostic {
7347 severity: DiagnosticSeverity::ERROR,
7348 message: "error 2".to_string(),
7349 group_id: 1,
7350 is_primary: true,
7351 ..Default::default()
7352 }
7353 }
7354 ]
7355 );
7356
7357 assert_eq!(
7358 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7359 &[
7360 DiagnosticEntry {
7361 range: Point::new(1, 8)..Point::new(1, 9),
7362 diagnostic: Diagnostic {
7363 severity: DiagnosticSeverity::WARNING,
7364 message: "error 1".to_string(),
7365 group_id: 0,
7366 is_primary: true,
7367 ..Default::default()
7368 }
7369 },
7370 DiagnosticEntry {
7371 range: Point::new(1, 8)..Point::new(1, 9),
7372 diagnostic: Diagnostic {
7373 severity: DiagnosticSeverity::HINT,
7374 message: "error 1 hint 1".to_string(),
7375 group_id: 0,
7376 is_primary: false,
7377 ..Default::default()
7378 }
7379 },
7380 ]
7381 );
7382 assert_eq!(
7383 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7384 &[
7385 DiagnosticEntry {
7386 range: Point::new(1, 13)..Point::new(1, 15),
7387 diagnostic: Diagnostic {
7388 severity: DiagnosticSeverity::HINT,
7389 message: "error 2 hint 1".to_string(),
7390 group_id: 1,
7391 is_primary: false,
7392 ..Default::default()
7393 }
7394 },
7395 DiagnosticEntry {
7396 range: Point::new(1, 13)..Point::new(1, 15),
7397 diagnostic: Diagnostic {
7398 severity: DiagnosticSeverity::HINT,
7399 message: "error 2 hint 2".to_string(),
7400 group_id: 1,
7401 is_primary: false,
7402 ..Default::default()
7403 }
7404 },
7405 DiagnosticEntry {
7406 range: Point::new(2, 8)..Point::new(2, 17),
7407 diagnostic: Diagnostic {
7408 severity: DiagnosticSeverity::ERROR,
7409 message: "error 2".to_string(),
7410 group_id: 1,
7411 is_primary: true,
7412 ..Default::default()
7413 }
7414 }
7415 ]
7416 );
7417 }
7418
7419 #[gpui::test]
7420 async fn test_rename(cx: &mut gpui::TestAppContext) {
7421 cx.foreground().forbid_parking();
7422
7423 let mut language = Language::new(
7424 LanguageConfig {
7425 name: "Rust".into(),
7426 path_suffixes: vec!["rs".to_string()],
7427 ..Default::default()
7428 },
7429 Some(tree_sitter_rust::language()),
7430 );
7431 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7432
7433 let fs = FakeFs::new(cx.background());
7434 fs.insert_tree(
7435 "/dir",
7436 json!({
7437 "one.rs": "const ONE: usize = 1;",
7438 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7439 }),
7440 )
7441 .await;
7442
7443 let project = Project::test(fs.clone(), cx);
7444 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7445
7446 let (tree, _) = project
7447 .update(cx, |project, cx| {
7448 project.find_or_create_local_worktree("/dir", true, cx)
7449 })
7450 .await
7451 .unwrap();
7452 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7453 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7454 .await;
7455
7456 let buffer = project
7457 .update(cx, |project, cx| {
7458 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
7459 })
7460 .await
7461 .unwrap();
7462
7463 let fake_server = fake_servers.next().await.unwrap();
7464
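        // Prepare a rename at the cursor position. The server replies with the
        // range of the symbol that can be renamed.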
7465 let response = project.update(cx, |project, cx| {
7466 project.prepare_rename(buffer.clone(), 7, cx)
7467 });
7468 fake_server
7469 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7470 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7471 assert_eq!(params.position, lsp::Position::new(0, 7));
7472 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7473 lsp::Position::new(0, 6),
7474 lsp::Position::new(0, 9),
7475 ))))
7476 })
7477 .next()
7478 .await
7479 .unwrap();
7480 let range = response.await.unwrap().unwrap();
7481 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7482 assert_eq!(range, 6..9);
7483
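        // Perform the rename. The server's workspace edit spans two files, one
        // of which isn't open in the project yet.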
7484 let response = project.update(cx, |project, cx| {
7485 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7486 });
7487 fake_server
7488 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7489 assert_eq!(
7490 params.text_document_position.text_document.uri.as_str(),
7491 "file:///dir/one.rs"
7492 );
7493 assert_eq!(
7494 params.text_document_position.position,
7495 lsp::Position::new(0, 7)
7496 );
7497 assert_eq!(params.new_name, "THREE");
7498 Ok(Some(lsp::WorkspaceEdit {
7499 changes: Some(
7500 [
7501 (
7502 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7503 vec![lsp::TextEdit::new(
7504 lsp::Range::new(
7505 lsp::Position::new(0, 6),
7506 lsp::Position::new(0, 9),
7507 ),
7508 "THREE".to_string(),
7509 )],
7510 ),
7511 (
7512 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7513 vec![
7514 lsp::TextEdit::new(
7515 lsp::Range::new(
7516 lsp::Position::new(0, 24),
7517 lsp::Position::new(0, 27),
7518 ),
7519 "THREE".to_string(),
7520 ),
7521 lsp::TextEdit::new(
7522 lsp::Range::new(
7523 lsp::Position::new(0, 35),
7524 lsp::Position::new(0, 38),
7525 ),
7526 "THREE".to_string(),
7527 ),
7528 ],
7529 ),
7530 ]
7531 .into_iter()
7532 .collect(),
7533 ),
7534 ..Default::default()
7535 }))
7536 })
7537 .next()
7538 .await
7539 .unwrap();
7540 let mut transaction = response.await.unwrap().0;
7541 assert_eq!(transaction.len(), 2);
7542 assert_eq!(
7543 transaction
7544 .remove_entry(&buffer)
7545 .unwrap()
7546 .0
7547 .read_with(cx, |buffer, _| buffer.text()),
7548 "const THREE: usize = 1;"
7549 );
7550 assert_eq!(
7551 transaction
7552 .into_keys()
7553 .next()
7554 .unwrap()
7555 .read_with(cx, |buffer, _| buffer.text()),
7556 "const TWO: usize = one::THREE + one::THREE;"
7557 );
7558 }
7559
7560 #[gpui::test]
7561 async fn test_search(cx: &mut gpui::TestAppContext) {
7562 let fs = FakeFs::new(cx.background());
7563 fs.insert_tree(
7564 "/dir",
7565 json!({
7566 "one.rs": "const ONE: usize = 1;",
7567 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7568 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7569 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7570 }),
7571 )
7572 .await;
7573 let project = Project::test(fs.clone(), cx);
7574 let (tree, _) = project
7575 .update(cx, |project, cx| {
7576 project.find_or_create_local_worktree("/dir", true, cx)
7577 })
7578 .await
7579 .unwrap();
7580 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
7581 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7582 .await;
7583
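        // None of the files are open yet, so matches come from the contents on disk.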
7584 assert_eq!(
7585 search(&project, SearchQuery::text("TWO", false, true), cx)
7586 .await
7587 .unwrap(),
7588 HashMap::from_iter([
7589 ("two.rs".to_string(), vec![6..9]),
7590 ("three.rs".to_string(), vec![37..40])
7591 ])
7592 );
7593
7594 let buffer_4 = project
7595 .update(cx, |project, cx| {
7596 project.open_buffer((worktree_id, "four.rs"), cx)
7597 })
7598 .await
7599 .unwrap();
7600 buffer_4.update(cx, |buffer, cx| {
7601 buffer.edit([20..28, 31..43], "two::TWO", cx);
7602 });
7603
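        // An open buffer with unsaved edits is searched using its in-memory
        // contents rather than the contents on disk.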
7604 assert_eq!(
7605 search(&project, SearchQuery::text("TWO", false, true), cx)
7606 .await
7607 .unwrap(),
7608 HashMap::from_iter([
7609 ("two.rs".to_string(), vec![6..9]),
7610 ("three.rs".to_string(), vec![37..40]),
7611 ("four.rs".to_string(), vec![25..28, 36..39])
7612 ])
7613 );
7614
7615 async fn search(
7616 project: &ModelHandle<Project>,
7617 query: SearchQuery,
7618 cx: &mut gpui::TestAppContext,
7619 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7620 let results = project
7621 .update(cx, |project, cx| project.search(query, cx))
7622 .await?;
7623
7624 Ok(results
7625 .into_iter()
7626 .map(|(buffer, ranges)| {
7627 buffer.read_with(cx, |buffer, _| {
7628 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7629 let ranges = ranges
7630 .into_iter()
7631 .map(|range| range.to_offset(buffer))
7632 .collect::<Vec<_>>();
7633 (path, ranges)
7634 })
7635 })
7636 .collect())
7637 }
7638 }
7639}