1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::watch;
29use rand::prelude::*;
30use search::SearchQuery;
31use serde::Serialize;
32use settings::Settings;
33use sha2::{Digest, Sha256};
34use similar::{ChangeTag, TextDiff};
35use std::{
36 cell::RefCell,
37 cmp::{self, Ordering},
38 convert::TryInto,
39 hash::Hash,
40 mem,
41 ops::Range,
42 path::{Component, Path, PathBuf},
43 rc::Rc,
44 sync::{
45 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
46 Arc,
47 },
48 time::Instant,
49};
50use util::{post_inc, ResultExt, TryFutureExt as _};
51
52pub use fs::*;
53pub use worktree::*;
54
55pub trait Item: Entity {
56 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
57}
58
59pub struct Project {
60 worktrees: Vec<WorktreeHandle>,
61 active_entry: Option<ProjectEntryId>,
62 languages: Arc<LanguageRegistry>,
63 language_servers:
64 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
65 started_language_servers:
66 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
67 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
68 language_server_settings: Arc<Mutex<serde_json::Value>>,
69 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
70 next_language_server_id: usize,
71 client: Arc<client::Client>,
72 next_entry_id: Arc<AtomicUsize>,
73 user_store: ModelHandle<UserStore>,
74 fs: Arc<dyn Fs>,
75 client_state: ProjectClientState,
76 collaborators: HashMap<PeerId, Collaborator>,
77 subscriptions: Vec<client::Subscription>,
78 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
79 shared_buffers: HashMap<PeerId, HashSet<u64>>,
80 loading_buffers: HashMap<
81 ProjectPath,
82 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
83 >,
84 loading_local_worktrees:
85 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
86 opened_buffers: HashMap<u64, OpenBuffer>,
87 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
88 nonce: u128,
89}
90
91enum OpenBuffer {
92 Strong(ModelHandle<Buffer>),
93 Weak(WeakModelHandle<Buffer>),
94 Loading(Vec<Operation>),
95}
96
97enum WorktreeHandle {
98 Strong(ModelHandle<Worktree>),
99 Weak(WeakModelHandle<Worktree>),
100}
101
102enum ProjectClientState {
103 Local {
104 is_shared: bool,
105 remote_id_tx: watch::Sender<Option<u64>>,
106 remote_id_rx: watch::Receiver<Option<u64>>,
107 _maintain_remote_id_task: Task<Option<()>>,
108 },
109 Remote {
110 sharing_has_stopped: bool,
111 remote_id: u64,
112 replica_id: ReplicaId,
113 _detect_unshare_task: Task<Option<()>>,
114 },
115}
116
117#[derive(Clone, Debug)]
118pub struct Collaborator {
119 pub user: Arc<User>,
120 pub peer_id: PeerId,
121 pub replica_id: ReplicaId,
122}
123
124#[derive(Clone, Debug, PartialEq)]
125pub enum Event {
126 ActiveEntryChanged(Option<ProjectEntryId>),
127 WorktreeRemoved(WorktreeId),
128 DiskBasedDiagnosticsStarted,
129 DiskBasedDiagnosticsUpdated,
130 DiskBasedDiagnosticsFinished,
131 DiagnosticsUpdated(ProjectPath),
132 RemoteIdChanged(Option<u64>),
133 CollaboratorLeft(PeerId),
134}
135
136#[derive(Serialize)]
137pub struct LanguageServerStatus {
138 pub name: String,
139 pub pending_work: BTreeMap<String, LanguageServerProgress>,
140 pub pending_diagnostic_updates: isize,
141}
142
143#[derive(Clone, Debug, Serialize)]
144pub struct LanguageServerProgress {
145 pub message: Option<String>,
146 pub percentage: Option<usize>,
147 #[serde(skip_serializing)]
148 pub last_update_at: Instant,
149}
150
151#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
152pub struct ProjectPath {
153 pub worktree_id: WorktreeId,
154 pub path: Arc<Path>,
155}
156
157#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
158pub struct DiagnosticSummary {
159 pub error_count: usize,
160 pub warning_count: usize,
161}
162
163#[derive(Debug)]
164pub struct Location {
165 pub buffer: ModelHandle<Buffer>,
166 pub range: Range<language::Anchor>,
167}
168
169#[derive(Debug)]
170pub struct DocumentHighlight {
171 pub range: Range<language::Anchor>,
172 pub kind: DocumentHighlightKind,
173}
174
175#[derive(Clone, Debug)]
176pub struct Symbol {
177 pub source_worktree_id: WorktreeId,
178 pub worktree_id: WorktreeId,
179 pub language_server_name: LanguageServerName,
180 pub path: PathBuf,
181 pub label: CodeLabel,
182 pub name: String,
183 pub kind: lsp::SymbolKind,
184 pub range: Range<PointUtf16>,
185 pub signature: [u8; 32],
186}
187
188#[derive(Default)]
189pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
190
191impl DiagnosticSummary {
192 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
193 let mut this = Self {
194 error_count: 0,
195 warning_count: 0,
196 };
197
198 for entry in diagnostics {
199 if entry.diagnostic.is_primary {
200 match entry.diagnostic.severity {
201 DiagnosticSeverity::ERROR => this.error_count += 1,
202 DiagnosticSeverity::WARNING => this.warning_count += 1,
203 _ => {}
204 }
205 }
206 }
207
208 this
209 }
210
211 pub fn is_empty(&self) -> bool {
212 self.error_count == 0 && self.warning_count == 0
213 }
214
215 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
216 proto::DiagnosticSummary {
217 path: path.to_string_lossy().to_string(),
218 error_count: self.error_count as u32,
219 warning_count: self.warning_count as u32,
220 }
221 }
222}
223
224#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
225pub struct ProjectEntryId(usize);
226
227impl ProjectEntryId {
228 pub const MAX: Self = Self(usize::MAX);
229
230 pub fn new(counter: &AtomicUsize) -> Self {
231 Self(counter.fetch_add(1, SeqCst))
232 }
233
234 pub fn from_proto(id: u64) -> Self {
235 Self(id as usize)
236 }
237
238 pub fn to_proto(&self) -> u64 {
239 self.0 as u64
240 }
241
242 pub fn to_usize(&self) -> usize {
243 self.0
244 }
245}
246
247impl Project {
248 pub fn init(client: &Arc<Client>) {
249 client.add_model_message_handler(Self::handle_add_collaborator);
250 client.add_model_message_handler(Self::handle_buffer_reloaded);
251 client.add_model_message_handler(Self::handle_buffer_saved);
252 client.add_model_message_handler(Self::handle_start_language_server);
253 client.add_model_message_handler(Self::handle_update_language_server);
254 client.add_model_message_handler(Self::handle_remove_collaborator);
255 client.add_model_message_handler(Self::handle_register_worktree);
256 client.add_model_message_handler(Self::handle_unregister_worktree);
257 client.add_model_message_handler(Self::handle_unshare_project);
258 client.add_model_message_handler(Self::handle_update_buffer_file);
259 client.add_model_message_handler(Self::handle_update_buffer);
260 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
261 client.add_model_message_handler(Self::handle_update_worktree);
262 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
263 client.add_model_request_handler(Self::handle_apply_code_action);
264 client.add_model_request_handler(Self::handle_reload_buffers);
265 client.add_model_request_handler(Self::handle_format_buffers);
266 client.add_model_request_handler(Self::handle_get_code_actions);
267 client.add_model_request_handler(Self::handle_get_completions);
268 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
269 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
270 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
271 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
272 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
273 client.add_model_request_handler(Self::handle_search_project);
274 client.add_model_request_handler(Self::handle_get_project_symbols);
275 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
276 client.add_model_request_handler(Self::handle_open_buffer_by_id);
277 client.add_model_request_handler(Self::handle_open_buffer_by_path);
278 client.add_model_request_handler(Self::handle_save_buffer);
279 }
280
281 pub fn local(
282 client: Arc<Client>,
283 user_store: ModelHandle<UserStore>,
284 languages: Arc<LanguageRegistry>,
285 fs: Arc<dyn Fs>,
286 cx: &mut MutableAppContext,
287 ) -> ModelHandle<Self> {
288 cx.add_model(|cx: &mut ModelContext<Self>| {
289 let (remote_id_tx, remote_id_rx) = watch::channel();
290 let _maintain_remote_id_task = cx.spawn_weak({
291 let rpc = client.clone();
292 move |this, mut cx| {
293 async move {
294 let mut status = rpc.status();
295 while let Some(status) = status.next().await {
296 if let Some(this) = this.upgrade(&cx) {
297 if status.is_connected() {
298 this.update(&mut cx, |this, cx| this.register(cx)).await?;
299 } else {
300 this.update(&mut cx, |this, cx| this.unregister(cx));
301 }
302 }
303 }
304 Ok(())
305 }
306 .log_err()
307 }
308 });
309
310 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
311 Self {
312 worktrees: Default::default(),
313 collaborators: Default::default(),
314 opened_buffers: Default::default(),
315 shared_buffers: Default::default(),
316 loading_buffers: Default::default(),
317 loading_local_worktrees: Default::default(),
318 buffer_snapshots: Default::default(),
319 client_state: ProjectClientState::Local {
320 is_shared: false,
321 remote_id_tx,
322 remote_id_rx,
323 _maintain_remote_id_task,
324 },
325 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
326 subscriptions: Vec::new(),
327 active_entry: None,
328 languages,
329 client,
330 user_store,
331 fs,
332 next_entry_id: Default::default(),
333 language_servers: Default::default(),
334 started_language_servers: Default::default(),
335 language_server_statuses: Default::default(),
336 last_workspace_edits_by_language_server: Default::default(),
337 language_server_settings: Default::default(),
338 next_language_server_id: 0,
339 nonce: StdRng::from_entropy().gen(),
340 }
341 })
342 }
343
344 pub async fn remote(
345 remote_id: u64,
346 client: Arc<Client>,
347 user_store: ModelHandle<UserStore>,
348 languages: Arc<LanguageRegistry>,
349 fs: Arc<dyn Fs>,
350 cx: &mut AsyncAppContext,
351 ) -> Result<ModelHandle<Self>> {
352 client.authenticate_and_connect(true, &cx).await?;
353
354 let response = client
355 .request(proto::JoinProject {
356 project_id: remote_id,
357 })
358 .await?;
359
360 let replica_id = response.replica_id as ReplicaId;
361
362 let mut worktrees = Vec::new();
363 for worktree in response.worktrees {
364 let (worktree, load_task) = cx
365 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
366 worktrees.push(worktree);
367 load_task.detach();
368 }
369
370 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
371 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
372 let mut this = Self {
373 worktrees: Vec::new(),
374 loading_buffers: Default::default(),
375 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
376 shared_buffers: Default::default(),
377 loading_local_worktrees: Default::default(),
378 active_entry: None,
379 collaborators: Default::default(),
380 languages,
381 user_store: user_store.clone(),
382 fs,
383 next_entry_id: Default::default(),
384 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
385 client: client.clone(),
386 client_state: ProjectClientState::Remote {
387 sharing_has_stopped: false,
388 remote_id,
389 replica_id,
390 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
391 async move {
392 let mut status = client.status();
393 let is_connected =
394 status.next().await.map_or(false, |s| s.is_connected());
395 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
396 if !is_connected || status.next().await.is_some() {
397 if let Some(this) = this.upgrade(&cx) {
398 this.update(&mut cx, |this, cx| this.project_unshared(cx))
399 }
400 }
401 Ok(())
402 }
403 .log_err()
404 }),
405 },
406 language_servers: Default::default(),
407 started_language_servers: Default::default(),
408 language_server_settings: Default::default(),
409 language_server_statuses: response
410 .language_servers
411 .into_iter()
412 .map(|server| {
413 (
414 server.id as usize,
415 LanguageServerStatus {
416 name: server.name,
417 pending_work: Default::default(),
418 pending_diagnostic_updates: 0,
419 },
420 )
421 })
422 .collect(),
423 last_workspace_edits_by_language_server: Default::default(),
424 next_language_server_id: 0,
425 opened_buffers: Default::default(),
426 buffer_snapshots: Default::default(),
427 nonce: StdRng::from_entropy().gen(),
428 };
429 for worktree in worktrees {
430 this.add_worktree(&worktree, cx);
431 }
432 this
433 });
434
435 let user_ids = response
436 .collaborators
437 .iter()
438 .map(|peer| peer.user_id)
439 .collect();
440 user_store
441 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
442 .await?;
443 let mut collaborators = HashMap::default();
444 for message in response.collaborators {
445 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
446 collaborators.insert(collaborator.peer_id, collaborator);
447 }
448
449 this.update(cx, |this, _| {
450 this.collaborators = collaborators;
451 });
452
453 Ok(this)
454 }
455
456 #[cfg(any(test, feature = "test-support"))]
457 pub async fn test(
458 fs: Arc<dyn Fs>,
459 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
460 cx: &mut gpui::TestAppContext,
461 ) -> ModelHandle<Project> {
462 let languages = Arc::new(LanguageRegistry::test());
463 let http_client = client::test::FakeHttpClient::with_404_response();
464 let client = client::Client::new(http_client.clone());
465 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
466 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
467 for path in root_paths {
468 let (tree, _) = project
469 .update(cx, |project, cx| {
470 project.find_or_create_local_worktree(path, true, cx)
471 })
472 .await
473 .unwrap();
474 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
475 .await;
476 }
477 project
478 }
479
480 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
481 self.opened_buffers
482 .get(&remote_id)
483 .and_then(|buffer| buffer.upgrade(cx))
484 }
485
486 pub fn languages(&self) -> &Arc<LanguageRegistry> {
487 &self.languages
488 }
489
490 #[cfg(any(test, feature = "test-support"))]
491 pub fn check_invariants(&self, cx: &AppContext) {
492 if self.is_local() {
493 let mut worktree_root_paths = HashMap::default();
494 for worktree in self.worktrees(cx) {
495 let worktree = worktree.read(cx);
496 let abs_path = worktree.as_local().unwrap().abs_path().clone();
497 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
498 assert_eq!(
499 prev_worktree_id,
500 None,
501 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
502 abs_path,
503 worktree.id(),
504 prev_worktree_id
505 )
506 }
507 } else {
508 let replica_id = self.replica_id();
509 for buffer in self.opened_buffers.values() {
510 if let Some(buffer) = buffer.upgrade(cx) {
511 let buffer = buffer.read(cx);
512 assert_eq!(
513 buffer.deferred_ops_len(),
514 0,
515 "replica {}, buffer {} has deferred operations",
516 replica_id,
517 buffer.remote_id()
518 );
519 }
520 }
521 }
522 }
523
524 #[cfg(any(test, feature = "test-support"))]
525 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
526 let path = path.into();
527 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
528 self.opened_buffers.iter().any(|(_, buffer)| {
529 if let Some(buffer) = buffer.upgrade(cx) {
530 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
531 if file.worktree == worktree && file.path() == &path.path {
532 return true;
533 }
534 }
535 }
536 false
537 })
538 } else {
539 false
540 }
541 }
542
543 pub fn fs(&self) -> &Arc<dyn Fs> {
544 &self.fs
545 }
546
547 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
548 self.unshare(cx);
549 for worktree in &self.worktrees {
550 if let Some(worktree) = worktree.upgrade(cx) {
551 worktree.update(cx, |worktree, _| {
552 worktree.as_local_mut().unwrap().unregister();
553 });
554 }
555 }
556
557 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
558 *remote_id_tx.borrow_mut() = None;
559 }
560
561 self.subscriptions.clear();
562 }
563
564 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
565 self.unregister(cx);
566
567 let response = self.client.request(proto::RegisterProject {});
568 cx.spawn(|this, mut cx| async move {
569 let remote_id = response.await?.project_id;
570
571 let mut registrations = Vec::new();
572 this.update(&mut cx, |this, cx| {
573 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
574 *remote_id_tx.borrow_mut() = Some(remote_id);
575 }
576
577 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
578
579 this.subscriptions
580 .push(this.client.add_model_for_remote_entity(remote_id, cx));
581
582 for worktree in &this.worktrees {
583 if let Some(worktree) = worktree.upgrade(cx) {
584 registrations.push(worktree.update(cx, |worktree, cx| {
585 let worktree = worktree.as_local_mut().unwrap();
586 worktree.register(remote_id, cx)
587 }));
588 }
589 }
590 });
591
592 futures::future::try_join_all(registrations).await?;
593 Ok(())
594 })
595 }
596
597 pub fn remote_id(&self) -> Option<u64> {
598 match &self.client_state {
599 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
600 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
601 }
602 }
603
604 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
605 let mut id = None;
606 let mut watch = None;
607 match &self.client_state {
608 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
609 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
610 }
611
612 async move {
613 if let Some(id) = id {
614 return id;
615 }
616 let mut watch = watch.unwrap();
617 loop {
618 let id = *watch.borrow();
619 if let Some(id) = id {
620 return id;
621 }
622 watch.next().await;
623 }
624 }
625 }
626
627 pub fn replica_id(&self) -> ReplicaId {
628 match &self.client_state {
629 ProjectClientState::Local { .. } => 0,
630 ProjectClientState::Remote { replica_id, .. } => *replica_id,
631 }
632 }
633
634 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
635 &self.collaborators
636 }
637
638 pub fn worktrees<'a>(
639 &'a self,
640 cx: &'a AppContext,
641 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
642 self.worktrees
643 .iter()
644 .filter_map(move |worktree| worktree.upgrade(cx))
645 }
646
647 pub fn visible_worktrees<'a>(
648 &'a self,
649 cx: &'a AppContext,
650 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
651 self.worktrees.iter().filter_map(|worktree| {
652 worktree.upgrade(cx).and_then(|worktree| {
653 if worktree.read(cx).is_visible() {
654 Some(worktree)
655 } else {
656 None
657 }
658 })
659 })
660 }
661
662 pub fn worktree_for_id(
663 &self,
664 id: WorktreeId,
665 cx: &AppContext,
666 ) -> Option<ModelHandle<Worktree>> {
667 self.worktrees(cx)
668 .find(|worktree| worktree.read(cx).id() == id)
669 }
670
671 pub fn worktree_for_entry(
672 &self,
673 entry_id: ProjectEntryId,
674 cx: &AppContext,
675 ) -> Option<ModelHandle<Worktree>> {
676 self.worktrees(cx)
677 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
678 }
679
680 pub fn worktree_id_for_entry(
681 &self,
682 entry_id: ProjectEntryId,
683 cx: &AppContext,
684 ) -> Option<WorktreeId> {
685 self.worktree_for_entry(entry_id, cx)
686 .map(|worktree| worktree.read(cx).id())
687 }
688
689 pub fn can_share(&self, cx: &AppContext) -> bool {
690 self.is_local() && self.visible_worktrees(cx).next().is_some()
691 }
692
693 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
694 let rpc = self.client.clone();
695 cx.spawn(|this, mut cx| async move {
696 let project_id = this.update(&mut cx, |this, cx| {
697 if let ProjectClientState::Local {
698 is_shared,
699 remote_id_rx,
700 ..
701 } = &mut this.client_state
702 {
703 *is_shared = true;
704
705 for open_buffer in this.opened_buffers.values_mut() {
706 match open_buffer {
707 OpenBuffer::Strong(_) => {}
708 OpenBuffer::Weak(buffer) => {
709 if let Some(buffer) = buffer.upgrade(cx) {
710 *open_buffer = OpenBuffer::Strong(buffer);
711 }
712 }
713 OpenBuffer::Loading(_) => unreachable!(),
714 }
715 }
716
717 for worktree_handle in this.worktrees.iter_mut() {
718 match worktree_handle {
719 WorktreeHandle::Strong(_) => {}
720 WorktreeHandle::Weak(worktree) => {
721 if let Some(worktree) = worktree.upgrade(cx) {
722 *worktree_handle = WorktreeHandle::Strong(worktree);
723 }
724 }
725 }
726 }
727
728 remote_id_rx
729 .borrow()
730 .ok_or_else(|| anyhow!("no project id"))
731 } else {
732 Err(anyhow!("can't share a remote project"))
733 }
734 })?;
735
736 rpc.request(proto::ShareProject { project_id }).await?;
737
738 let mut tasks = Vec::new();
739 this.update(&mut cx, |this, cx| {
740 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
741 worktree.update(cx, |worktree, cx| {
742 let worktree = worktree.as_local_mut().unwrap();
743 tasks.push(worktree.share(project_id, cx));
744 });
745 }
746 });
747 for task in tasks {
748 task.await?;
749 }
750 this.update(&mut cx, |_, cx| cx.notify());
751 Ok(())
752 })
753 }
754
755 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) {
756 let rpc = self.client.clone();
757
758 if let ProjectClientState::Local {
759 is_shared,
760 remote_id_rx,
761 ..
762 } = &mut self.client_state
763 {
764 if !*is_shared {
765 return;
766 }
767
768 *is_shared = false;
769 self.collaborators.clear();
770 self.shared_buffers.clear();
771 for worktree_handle in self.worktrees.iter_mut() {
772 if let WorktreeHandle::Strong(worktree) = worktree_handle {
773 let is_visible = worktree.update(cx, |worktree, _| {
774 worktree.as_local_mut().unwrap().unshare();
775 worktree.is_visible()
776 });
777 if !is_visible {
778 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
779 }
780 }
781 }
782
783 for open_buffer in self.opened_buffers.values_mut() {
784 match open_buffer {
785 OpenBuffer::Strong(buffer) => {
786 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
787 }
788 _ => {}
789 }
790 }
791
792 if let Some(project_id) = *remote_id_rx.borrow() {
793 rpc.send(proto::UnshareProject { project_id }).log_err();
794 }
795
796 cx.notify();
797 } else {
798 log::error!("attempted to unshare a remote project");
799 }
800 }
801
802 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
803 if let ProjectClientState::Remote {
804 sharing_has_stopped,
805 ..
806 } = &mut self.client_state
807 {
808 *sharing_has_stopped = true;
809 self.collaborators.clear();
810 cx.notify();
811 }
812 }
813
814 pub fn is_read_only(&self) -> bool {
815 match &self.client_state {
816 ProjectClientState::Local { .. } => false,
817 ProjectClientState::Remote {
818 sharing_has_stopped,
819 ..
820 } => *sharing_has_stopped,
821 }
822 }
823
824 pub fn is_local(&self) -> bool {
825 match &self.client_state {
826 ProjectClientState::Local { .. } => true,
827 ProjectClientState::Remote { .. } => false,
828 }
829 }
830
831 pub fn is_remote(&self) -> bool {
832 !self.is_local()
833 }
834
835 pub fn create_buffer(
836 &mut self,
837 text: &str,
838 language: Option<Arc<Language>>,
839 cx: &mut ModelContext<Self>,
840 ) -> Result<ModelHandle<Buffer>> {
841 if self.is_remote() {
842 return Err(anyhow!("creating buffers as a guest is not supported yet"));
843 }
844
845 let buffer = cx.add_model(|cx| {
846 Buffer::new(self.replica_id(), text, cx)
847 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
848 });
849 self.register_buffer(&buffer, cx)?;
850 Ok(buffer)
851 }
852
853 pub fn open_path(
854 &mut self,
855 path: impl Into<ProjectPath>,
856 cx: &mut ModelContext<Self>,
857 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
858 let task = self.open_buffer(path, cx);
859 cx.spawn_weak(|_, cx| async move {
860 let buffer = task.await?;
861 let project_entry_id = buffer
862 .read_with(&cx, |buffer, cx| {
863 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
864 })
865 .ok_or_else(|| anyhow!("no project entry"))?;
866 Ok((project_entry_id, buffer.into()))
867 })
868 }
869
870 pub fn open_local_buffer(
871 &mut self,
872 abs_path: impl AsRef<Path>,
873 cx: &mut ModelContext<Self>,
874 ) -> Task<Result<ModelHandle<Buffer>>> {
875 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
876 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
877 } else {
878 Task::ready(Err(anyhow!("no such path")))
879 }
880 }
881
882 pub fn open_buffer(
883 &mut self,
884 path: impl Into<ProjectPath>,
885 cx: &mut ModelContext<Self>,
886 ) -> Task<Result<ModelHandle<Buffer>>> {
887 let project_path = path.into();
888 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
889 worktree
890 } else {
891 return Task::ready(Err(anyhow!("no such worktree")));
892 };
893
894 // If there is already a buffer for the given path, then return it.
895 let existing_buffer = self.get_open_buffer(&project_path, cx);
896 if let Some(existing_buffer) = existing_buffer {
897 return Task::ready(Ok(existing_buffer));
898 }
899
900 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
901 // If the given path is already being loaded, then wait for that existing
902 // task to complete and return the same buffer.
903 hash_map::Entry::Occupied(e) => e.get().clone(),
904
905 // Otherwise, record the fact that this path is now being loaded.
906 hash_map::Entry::Vacant(entry) => {
907 let (mut tx, rx) = postage::watch::channel();
908 entry.insert(rx.clone());
909
910 let load_buffer = if worktree.read(cx).is_local() {
911 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
912 } else {
913 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
914 };
915
916 cx.spawn(move |this, mut cx| async move {
917 let load_result = load_buffer.await;
918 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
919 // Record the fact that the buffer is no longer loading.
920 this.loading_buffers.remove(&project_path);
921 let buffer = load_result.map_err(Arc::new)?;
922 Ok(buffer)
923 }));
924 })
925 .detach();
926 rx
927 }
928 };
929
930 cx.foreground().spawn(async move {
931 loop {
932 if let Some(result) = loading_watch.borrow().as_ref() {
933 match result {
934 Ok(buffer) => return Ok(buffer.clone()),
935 Err(error) => return Err(anyhow!("{}", error)),
936 }
937 }
938 loading_watch.next().await;
939 }
940 })
941 }
942
943 fn open_local_buffer_internal(
944 &mut self,
945 path: &Arc<Path>,
946 worktree: &ModelHandle<Worktree>,
947 cx: &mut ModelContext<Self>,
948 ) -> Task<Result<ModelHandle<Buffer>>> {
949 let load_buffer = worktree.update(cx, |worktree, cx| {
950 let worktree = worktree.as_local_mut().unwrap();
951 worktree.load_buffer(path, cx)
952 });
953 cx.spawn(|this, mut cx| async move {
954 let buffer = load_buffer.await?;
955 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
956 Ok(buffer)
957 })
958 }
959
960 fn open_remote_buffer_internal(
961 &mut self,
962 path: &Arc<Path>,
963 worktree: &ModelHandle<Worktree>,
964 cx: &mut ModelContext<Self>,
965 ) -> Task<Result<ModelHandle<Buffer>>> {
966 let rpc = self.client.clone();
967 let project_id = self.remote_id().unwrap();
968 let remote_worktree_id = worktree.read(cx).id();
969 let path = path.clone();
970 let path_string = path.to_string_lossy().to_string();
971 cx.spawn(|this, mut cx| async move {
972 let response = rpc
973 .request(proto::OpenBufferByPath {
974 project_id,
975 worktree_id: remote_worktree_id.to_proto(),
976 path: path_string,
977 })
978 .await?;
979 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
980 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
981 .await
982 })
983 }
984
985 fn open_local_buffer_via_lsp(
986 &mut self,
987 abs_path: lsp::Url,
988 lsp_adapter: Arc<dyn LspAdapter>,
989 lsp_server: Arc<LanguageServer>,
990 cx: &mut ModelContext<Self>,
991 ) -> Task<Result<ModelHandle<Buffer>>> {
992 cx.spawn(|this, mut cx| async move {
993 let abs_path = abs_path
994 .to_file_path()
995 .map_err(|_| anyhow!("can't convert URI to path"))?;
996 let (worktree, relative_path) = if let Some(result) =
997 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
998 {
999 result
1000 } else {
1001 let worktree = this
1002 .update(&mut cx, |this, cx| {
1003 this.create_local_worktree(&abs_path, false, cx)
1004 })
1005 .await?;
1006 this.update(&mut cx, |this, cx| {
1007 this.language_servers.insert(
1008 (worktree.read(cx).id(), lsp_adapter.name()),
1009 (lsp_adapter, lsp_server),
1010 );
1011 });
1012 (worktree, PathBuf::new())
1013 };
1014
1015 let project_path = ProjectPath {
1016 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1017 path: relative_path.into(),
1018 };
1019 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1020 .await
1021 })
1022 }
1023
1024 pub fn open_buffer_by_id(
1025 &mut self,
1026 id: u64,
1027 cx: &mut ModelContext<Self>,
1028 ) -> Task<Result<ModelHandle<Buffer>>> {
1029 if let Some(buffer) = self.buffer_for_id(id, cx) {
1030 Task::ready(Ok(buffer))
1031 } else if self.is_local() {
1032 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1033 } else if let Some(project_id) = self.remote_id() {
1034 let request = self
1035 .client
1036 .request(proto::OpenBufferById { project_id, id });
1037 cx.spawn(|this, mut cx| async move {
1038 let buffer = request
1039 .await?
1040 .buffer
1041 .ok_or_else(|| anyhow!("invalid buffer"))?;
1042 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1043 .await
1044 })
1045 } else {
1046 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1047 }
1048 }
1049
1050 pub fn save_buffer_as(
1051 &mut self,
1052 buffer: ModelHandle<Buffer>,
1053 abs_path: PathBuf,
1054 cx: &mut ModelContext<Project>,
1055 ) -> Task<Result<()>> {
1056 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1057 let old_path =
1058 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1059 cx.spawn(|this, mut cx| async move {
1060 if let Some(old_path) = old_path {
1061 this.update(&mut cx, |this, cx| {
1062 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1063 });
1064 }
1065 let (worktree, path) = worktree_task.await?;
1066 worktree
1067 .update(&mut cx, |worktree, cx| {
1068 worktree
1069 .as_local_mut()
1070 .unwrap()
1071 .save_buffer_as(buffer.clone(), path, cx)
1072 })
1073 .await?;
1074 this.update(&mut cx, |this, cx| {
1075 this.assign_language_to_buffer(&buffer, cx);
1076 this.register_buffer_with_language_server(&buffer, cx);
1077 });
1078 Ok(())
1079 })
1080 }
1081
1082 pub fn get_open_buffer(
1083 &mut self,
1084 path: &ProjectPath,
1085 cx: &mut ModelContext<Self>,
1086 ) -> Option<ModelHandle<Buffer>> {
1087 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1088 self.opened_buffers.values().find_map(|buffer| {
1089 let buffer = buffer.upgrade(cx)?;
1090 let file = File::from_dyn(buffer.read(cx).file())?;
1091 if file.worktree == worktree && file.path() == &path.path {
1092 Some(buffer)
1093 } else {
1094 None
1095 }
1096 })
1097 }
1098
1099 fn register_buffer(
1100 &mut self,
1101 buffer: &ModelHandle<Buffer>,
1102 cx: &mut ModelContext<Self>,
1103 ) -> Result<()> {
1104 let remote_id = buffer.read(cx).remote_id();
1105 let open_buffer = if self.is_remote() || self.is_shared() {
1106 OpenBuffer::Strong(buffer.clone())
1107 } else {
1108 OpenBuffer::Weak(buffer.downgrade())
1109 };
1110
1111 match self.opened_buffers.insert(remote_id, open_buffer) {
1112 None => {}
1113 Some(OpenBuffer::Loading(operations)) => {
1114 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1115 }
1116 Some(OpenBuffer::Weak(existing_handle)) => {
1117 if existing_handle.upgrade(cx).is_some() {
1118 Err(anyhow!(
1119 "already registered buffer with remote id {}",
1120 remote_id
1121 ))?
1122 }
1123 }
1124 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1125 "already registered buffer with remote id {}",
1126 remote_id
1127 ))?,
1128 }
1129 cx.subscribe(buffer, |this, buffer, event, cx| {
1130 this.on_buffer_event(buffer, event, cx);
1131 })
1132 .detach();
1133
1134 self.assign_language_to_buffer(buffer, cx);
1135 self.register_buffer_with_language_server(buffer, cx);
1136 cx.observe_release(buffer, |this, buffer, cx| {
1137 if let Some(file) = File::from_dyn(buffer.file()) {
1138 if file.is_local() {
1139 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1140 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1141 server
1142 .notify::<lsp::notification::DidCloseTextDocument>(
1143 lsp::DidCloseTextDocumentParams {
1144 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1145 },
1146 )
1147 .log_err();
1148 }
1149 }
1150 }
1151 })
1152 .detach();
1153
1154 Ok(())
1155 }
1156
1157 fn register_buffer_with_language_server(
1158 &mut self,
1159 buffer_handle: &ModelHandle<Buffer>,
1160 cx: &mut ModelContext<Self>,
1161 ) {
1162 let buffer = buffer_handle.read(cx);
1163 let buffer_id = buffer.remote_id();
1164 if let Some(file) = File::from_dyn(buffer.file()) {
1165 if file.is_local() {
1166 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1167 let initial_snapshot = buffer.text_snapshot();
1168
1169 let mut language_server = None;
1170 let mut language_id = None;
1171 if let Some(language) = buffer.language() {
1172 let worktree_id = file.worktree_id(cx);
1173 if let Some(adapter) = language.lsp_adapter() {
1174 language_id = adapter.id_for_language(language.name().as_ref());
1175 language_server = self
1176 .language_servers
1177 .get(&(worktree_id, adapter.name()))
1178 .cloned();
1179 }
1180 }
1181
1182 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1183 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1184 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1185 .log_err();
1186 }
1187 }
1188
1189 if let Some((_, server)) = language_server {
1190 server
1191 .notify::<lsp::notification::DidOpenTextDocument>(
1192 lsp::DidOpenTextDocumentParams {
1193 text_document: lsp::TextDocumentItem::new(
1194 uri,
1195 language_id.unwrap_or_default(),
1196 0,
1197 initial_snapshot.text(),
1198 ),
1199 }
1200 .clone(),
1201 )
1202 .log_err();
1203 buffer_handle.update(cx, |buffer, cx| {
1204 buffer.set_completion_triggers(
1205 server
1206 .capabilities()
1207 .completion_provider
1208 .as_ref()
1209 .and_then(|provider| provider.trigger_characters.clone())
1210 .unwrap_or(Vec::new()),
1211 cx,
1212 )
1213 });
1214 self.buffer_snapshots
1215 .insert(buffer_id, vec![(0, initial_snapshot)]);
1216 }
1217 }
1218 }
1219 }
1220
1221 fn unregister_buffer_from_language_server(
1222 &mut self,
1223 buffer: &ModelHandle<Buffer>,
1224 old_path: PathBuf,
1225 cx: &mut ModelContext<Self>,
1226 ) {
1227 buffer.update(cx, |buffer, cx| {
1228 buffer.update_diagnostics(Default::default(), cx);
1229 self.buffer_snapshots.remove(&buffer.remote_id());
1230 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1231 language_server
1232 .notify::<lsp::notification::DidCloseTextDocument>(
1233 lsp::DidCloseTextDocumentParams {
1234 text_document: lsp::TextDocumentIdentifier::new(
1235 lsp::Url::from_file_path(old_path).unwrap(),
1236 ),
1237 },
1238 )
1239 .log_err();
1240 }
1241 });
1242 }
1243
1244 fn on_buffer_event(
1245 &mut self,
1246 buffer: ModelHandle<Buffer>,
1247 event: &BufferEvent,
1248 cx: &mut ModelContext<Self>,
1249 ) -> Option<()> {
1250 match event {
1251 BufferEvent::Operation(operation) => {
1252 let project_id = self.remote_id()?;
1253 let request = self.client.request(proto::UpdateBuffer {
1254 project_id,
1255 buffer_id: buffer.read(cx).remote_id(),
1256 operations: vec![language::proto::serialize_operation(&operation)],
1257 });
1258 cx.background().spawn(request).detach_and_log_err(cx);
1259 }
1260 BufferEvent::Edited { .. } => {
1261 let (_, language_server) = self
1262 .language_server_for_buffer(buffer.read(cx), cx)?
1263 .clone();
1264 let buffer = buffer.read(cx);
1265 let file = File::from_dyn(buffer.file())?;
1266 let abs_path = file.as_local()?.abs_path(cx);
1267 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1268 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1269 let (version, prev_snapshot) = buffer_snapshots.last()?;
1270 let next_snapshot = buffer.text_snapshot();
1271 let next_version = version + 1;
1272
1273 let content_changes = buffer
1274 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1275 .map(|edit| {
1276 let edit_start = edit.new.start.0;
1277 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1278 let new_text = next_snapshot
1279 .text_for_range(edit.new.start.1..edit.new.end.1)
1280 .collect();
1281 lsp::TextDocumentContentChangeEvent {
1282 range: Some(lsp::Range::new(
1283 point_to_lsp(edit_start),
1284 point_to_lsp(edit_end),
1285 )),
1286 range_length: None,
1287 text: new_text,
1288 }
1289 })
1290 .collect();
1291
1292 buffer_snapshots.push((next_version, next_snapshot));
1293
1294 language_server
1295 .notify::<lsp::notification::DidChangeTextDocument>(
1296 lsp::DidChangeTextDocumentParams {
1297 text_document: lsp::VersionedTextDocumentIdentifier::new(
1298 uri,
1299 next_version,
1300 ),
1301 content_changes,
1302 },
1303 )
1304 .log_err();
1305 }
1306 BufferEvent::Saved => {
1307 let file = File::from_dyn(buffer.read(cx).file())?;
1308 let worktree_id = file.worktree_id(cx);
1309 let abs_path = file.as_local()?.abs_path(cx);
1310 let text_document = lsp::TextDocumentIdentifier {
1311 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1312 };
1313
1314 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1315 server
1316 .notify::<lsp::notification::DidSaveTextDocument>(
1317 lsp::DidSaveTextDocumentParams {
1318 text_document: text_document.clone(),
1319 text: None,
1320 },
1321 )
1322 .log_err();
1323 }
1324 }
1325 _ => {}
1326 }
1327
1328 None
1329 }
1330
1331 fn language_servers_for_worktree(
1332 &self,
1333 worktree_id: WorktreeId,
1334 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1335 self.language_servers.iter().filter_map(
1336 move |((language_server_worktree_id, _), server)| {
1337 if *language_server_worktree_id == worktree_id {
1338 Some(server)
1339 } else {
1340 None
1341 }
1342 },
1343 )
1344 }
1345
1346 fn assign_language_to_buffer(
1347 &mut self,
1348 buffer: &ModelHandle<Buffer>,
1349 cx: &mut ModelContext<Self>,
1350 ) -> Option<()> {
1351 // If the buffer has a language, set it and start the language server if we haven't already.
1352 let full_path = buffer.read(cx).file()?.full_path(cx);
1353 let language = self.languages.select_language(&full_path)?;
1354 buffer.update(cx, |buffer, cx| {
1355 buffer.set_language(Some(language.clone()), cx);
1356 });
1357
1358 let file = File::from_dyn(buffer.read(cx).file())?;
1359 let worktree = file.worktree.read(cx).as_local()?;
1360 let worktree_id = worktree.id();
1361 let worktree_abs_path = worktree.abs_path().clone();
1362 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1363
1364 None
1365 }
1366
1367 fn start_language_server(
1368 &mut self,
1369 worktree_id: WorktreeId,
1370 worktree_path: Arc<Path>,
1371 language: Arc<Language>,
1372 cx: &mut ModelContext<Self>,
1373 ) {
1374 let adapter = if let Some(adapter) = language.lsp_adapter() {
1375 adapter
1376 } else {
1377 return;
1378 };
1379 let key = (worktree_id, adapter.name());
1380 self.started_language_servers
1381 .entry(key.clone())
1382 .or_insert_with(|| {
1383 let server_id = post_inc(&mut self.next_language_server_id);
1384 let language_server = self.languages.start_language_server(
1385 server_id,
1386 language.clone(),
1387 worktree_path,
1388 self.client.http_client(),
1389 cx,
1390 );
1391 cx.spawn_weak(|this, mut cx| async move {
1392 let language_server = language_server?.await.log_err()?;
1393 let language_server = language_server
1394 .initialize(adapter.initialization_options())
1395 .await
1396 .log_err()?;
1397 let this = this.upgrade(&cx)?;
1398 let disk_based_diagnostics_progress_token =
1399 adapter.disk_based_diagnostics_progress_token();
1400
1401 language_server
1402 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1403 let this = this.downgrade();
1404 let adapter = adapter.clone();
1405 move |params, mut cx| {
1406 if let Some(this) = this.upgrade(&cx) {
1407 this.update(&mut cx, |this, cx| {
1408 this.on_lsp_diagnostics_published(
1409 server_id,
1410 params,
1411 &adapter,
1412 disk_based_diagnostics_progress_token,
1413 cx,
1414 );
1415 });
1416 }
1417 }
1418 })
1419 .detach();
1420
1421 language_server
1422 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1423 let settings = this
1424 .read_with(&cx, |this, _| this.language_server_settings.clone());
1425 move |params, _| {
1426 let settings = settings.lock().clone();
1427 async move {
1428 Ok(params
1429 .items
1430 .into_iter()
1431 .map(|item| {
1432 if let Some(section) = &item.section {
1433 settings
1434 .get(section)
1435 .cloned()
1436 .unwrap_or(serde_json::Value::Null)
1437 } else {
1438 settings.clone()
1439 }
1440 })
1441 .collect())
1442 }
1443 }
1444 })
1445 .detach();
1446
1447 language_server
1448 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1449 let this = this.downgrade();
1450 let adapter = adapter.clone();
1451 let language_server = language_server.clone();
1452 move |params, cx| {
1453 Self::on_lsp_workspace_edit(
1454 this,
1455 params,
1456 server_id,
1457 adapter.clone(),
1458 language_server.clone(),
1459 cx,
1460 )
1461 }
1462 })
1463 .detach();
1464
1465 language_server
1466 .on_notification::<lsp::notification::Progress, _>({
1467 let this = this.downgrade();
1468 move |params, mut cx| {
1469 if let Some(this) = this.upgrade(&cx) {
1470 this.update(&mut cx, |this, cx| {
1471 this.on_lsp_progress(
1472 params,
1473 server_id,
1474 disk_based_diagnostics_progress_token,
1475 cx,
1476 );
1477 });
1478 }
1479 }
1480 })
1481 .detach();
1482
1483 this.update(&mut cx, |this, cx| {
1484 this.language_servers
1485 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1486 this.language_server_statuses.insert(
1487 server_id,
1488 LanguageServerStatus {
1489 name: language_server.name().to_string(),
1490 pending_work: Default::default(),
1491 pending_diagnostic_updates: 0,
1492 },
1493 );
1494 language_server
1495 .notify::<lsp::notification::DidChangeConfiguration>(
1496 lsp::DidChangeConfigurationParams {
1497 settings: this.language_server_settings.lock().clone(),
1498 },
1499 )
1500 .ok();
1501
1502 if let Some(project_id) = this.remote_id() {
1503 this.client
1504 .send(proto::StartLanguageServer {
1505 project_id,
1506 server: Some(proto::LanguageServer {
1507 id: server_id as u64,
1508 name: language_server.name().to_string(),
1509 }),
1510 })
1511 .log_err();
1512 }
1513
1514 // Tell the language server about every open buffer in the worktree that matches the language.
1515 for buffer in this.opened_buffers.values() {
1516 if let Some(buffer_handle) = buffer.upgrade(cx) {
1517 let buffer = buffer_handle.read(cx);
1518 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1519 file
1520 } else {
1521 continue;
1522 };
1523 let language = if let Some(language) = buffer.language() {
1524 language
1525 } else {
1526 continue;
1527 };
1528 if file.worktree.read(cx).id() != key.0
1529 || language.lsp_adapter().map(|a| a.name())
1530 != Some(key.1.clone())
1531 {
1532 continue;
1533 }
1534
1535 let file = file.as_local()?;
1536 let versions = this
1537 .buffer_snapshots
1538 .entry(buffer.remote_id())
1539 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1540 let (version, initial_snapshot) = versions.last().unwrap();
1541 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1542 let language_id = adapter.id_for_language(language.name().as_ref());
1543 language_server
1544 .notify::<lsp::notification::DidOpenTextDocument>(
1545 lsp::DidOpenTextDocumentParams {
1546 text_document: lsp::TextDocumentItem::new(
1547 uri,
1548 language_id.unwrap_or_default(),
1549 *version,
1550 initial_snapshot.text(),
1551 ),
1552 },
1553 )
1554 .log_err()?;
1555 buffer_handle.update(cx, |buffer, cx| {
1556 buffer.set_completion_triggers(
1557 language_server
1558 .capabilities()
1559 .completion_provider
1560 .as_ref()
1561 .and_then(|provider| {
1562 provider.trigger_characters.clone()
1563 })
1564 .unwrap_or(Vec::new()),
1565 cx,
1566 )
1567 });
1568 }
1569 }
1570
1571 cx.notify();
1572 Some(())
1573 });
1574
1575 Some(language_server)
1576 })
1577 });
1578 }
1579
1580 pub fn restart_language_servers_for_buffers(
1581 &mut self,
1582 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1583 cx: &mut ModelContext<Self>,
1584 ) -> Option<()> {
1585 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1586 .into_iter()
1587 .filter_map(|buffer| {
1588 let file = File::from_dyn(buffer.read(cx).file())?;
1589 let worktree = file.worktree.read(cx).as_local()?;
1590 let worktree_id = worktree.id();
1591 let worktree_abs_path = worktree.abs_path().clone();
1592 let full_path = file.full_path(cx);
1593 Some((worktree_id, worktree_abs_path, full_path))
1594 })
1595 .collect();
1596 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1597 let language = self.languages.select_language(&full_path)?;
1598 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1599 }
1600
1601 None
1602 }
1603
1604 fn restart_language_server(
1605 &mut self,
1606 worktree_id: WorktreeId,
1607 worktree_path: Arc<Path>,
1608 language: Arc<Language>,
1609 cx: &mut ModelContext<Self>,
1610 ) {
1611 let adapter = if let Some(adapter) = language.lsp_adapter() {
1612 adapter
1613 } else {
1614 return;
1615 };
1616 let key = (worktree_id, adapter.name());
1617 let server_to_shutdown = self.language_servers.remove(&key);
1618 self.started_language_servers.remove(&key);
1619 server_to_shutdown
1620 .as_ref()
1621 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1622 cx.spawn_weak(|this, mut cx| async move {
1623 if let Some(this) = this.upgrade(&cx) {
1624 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1625 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1626 shutdown_task.await;
1627 }
1628 }
1629
1630 this.update(&mut cx, |this, cx| {
1631 this.start_language_server(worktree_id, worktree_path, language, cx);
1632 });
1633 }
1634 })
1635 .detach();
1636 }
1637
1638 fn on_lsp_diagnostics_published(
1639 &mut self,
1640 server_id: usize,
1641 mut params: lsp::PublishDiagnosticsParams,
1642 adapter: &Arc<dyn LspAdapter>,
1643 disk_based_diagnostics_progress_token: Option<&str>,
1644 cx: &mut ModelContext<Self>,
1645 ) {
1646 adapter.process_diagnostics(&mut params);
1647 if disk_based_diagnostics_progress_token.is_none() {
1648 self.disk_based_diagnostics_started(cx);
1649 self.broadcast_language_server_update(
1650 server_id,
1651 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1652 proto::LspDiskBasedDiagnosticsUpdating {},
1653 ),
1654 );
1655 }
1656 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1657 .log_err();
1658 if disk_based_diagnostics_progress_token.is_none() {
1659 self.disk_based_diagnostics_finished(cx);
1660 self.broadcast_language_server_update(
1661 server_id,
1662 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1663 proto::LspDiskBasedDiagnosticsUpdated {},
1664 ),
1665 );
1666 }
1667 }
1668
1669 fn on_lsp_progress(
1670 &mut self,
1671 progress: lsp::ProgressParams,
1672 server_id: usize,
1673 disk_based_diagnostics_progress_token: Option<&str>,
1674 cx: &mut ModelContext<Self>,
1675 ) {
1676 let token = match progress.token {
1677 lsp::NumberOrString::String(token) => token,
1678 lsp::NumberOrString::Number(token) => {
1679 log::info!("skipping numeric progress token {}", token);
1680 return;
1681 }
1682 };
1683 let progress = match progress.value {
1684 lsp::ProgressParamsValue::WorkDone(value) => value,
1685 };
1686 let language_server_status =
1687 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1688 status
1689 } else {
1690 return;
1691 };
1692 match progress {
1693 lsp::WorkDoneProgress::Begin(_) => {
1694 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1695 language_server_status.pending_diagnostic_updates += 1;
1696 if language_server_status.pending_diagnostic_updates == 1 {
1697 self.disk_based_diagnostics_started(cx);
1698 self.broadcast_language_server_update(
1699 server_id,
1700 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1701 proto::LspDiskBasedDiagnosticsUpdating {},
1702 ),
1703 );
1704 }
1705 } else {
1706 self.on_lsp_work_start(server_id, token.clone(), cx);
1707 self.broadcast_language_server_update(
1708 server_id,
1709 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1710 token,
1711 }),
1712 );
1713 }
1714 }
1715 lsp::WorkDoneProgress::Report(report) => {
1716 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1717 self.on_lsp_work_progress(
1718 server_id,
1719 token.clone(),
1720 LanguageServerProgress {
1721 message: report.message.clone(),
1722 percentage: report.percentage.map(|p| p as usize),
1723 last_update_at: Instant::now(),
1724 },
1725 cx,
1726 );
1727 self.broadcast_language_server_update(
1728 server_id,
1729 proto::update_language_server::Variant::WorkProgress(
1730 proto::LspWorkProgress {
1731 token,
1732 message: report.message,
1733 percentage: report.percentage.map(|p| p as u32),
1734 },
1735 ),
1736 );
1737 }
1738 }
1739 lsp::WorkDoneProgress::End(_) => {
1740 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1741 language_server_status.pending_diagnostic_updates -= 1;
1742 if language_server_status.pending_diagnostic_updates == 0 {
1743 self.disk_based_diagnostics_finished(cx);
1744 self.broadcast_language_server_update(
1745 server_id,
1746 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1747 proto::LspDiskBasedDiagnosticsUpdated {},
1748 ),
1749 );
1750 }
1751 } else {
1752 self.on_lsp_work_end(server_id, token.clone(), cx);
1753 self.broadcast_language_server_update(
1754 server_id,
1755 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1756 token,
1757 }),
1758 );
1759 }
1760 }
1761 }
1762 }
1763
1764 fn on_lsp_work_start(
1765 &mut self,
1766 language_server_id: usize,
1767 token: String,
1768 cx: &mut ModelContext<Self>,
1769 ) {
1770 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1771 status.pending_work.insert(
1772 token,
1773 LanguageServerProgress {
1774 message: None,
1775 percentage: None,
1776 last_update_at: Instant::now(),
1777 },
1778 );
1779 cx.notify();
1780 }
1781 }
1782
1783 fn on_lsp_work_progress(
1784 &mut self,
1785 language_server_id: usize,
1786 token: String,
1787 progress: LanguageServerProgress,
1788 cx: &mut ModelContext<Self>,
1789 ) {
1790 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1791 status.pending_work.insert(token, progress);
1792 cx.notify();
1793 }
1794 }
1795
1796 fn on_lsp_work_end(
1797 &mut self,
1798 language_server_id: usize,
1799 token: String,
1800 cx: &mut ModelContext<Self>,
1801 ) {
1802 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1803 status.pending_work.remove(&token);
1804 cx.notify();
1805 }
1806 }
1807
1808 async fn on_lsp_workspace_edit(
1809 this: WeakModelHandle<Self>,
1810 params: lsp::ApplyWorkspaceEditParams,
1811 server_id: usize,
1812 adapter: Arc<dyn LspAdapter>,
1813 language_server: Arc<LanguageServer>,
1814 mut cx: AsyncAppContext,
1815 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1816 let this = this
1817 .upgrade(&cx)
1818 .ok_or_else(|| anyhow!("project project closed"))?;
1819 let transaction = Self::deserialize_workspace_edit(
1820 this.clone(),
1821 params.edit,
1822 true,
1823 adapter.clone(),
1824 language_server.clone(),
1825 &mut cx,
1826 )
1827 .await
1828 .log_err();
1829 this.update(&mut cx, |this, _| {
1830 if let Some(transaction) = transaction {
1831 this.last_workspace_edits_by_language_server
1832 .insert(server_id, transaction);
1833 }
1834 });
1835 Ok(lsp::ApplyWorkspaceEditResponse {
1836 applied: true,
1837 failed_change: None,
1838 failure_reason: None,
1839 })
1840 }
1841
1842 fn broadcast_language_server_update(
1843 &self,
1844 language_server_id: usize,
1845 event: proto::update_language_server::Variant,
1846 ) {
1847 if let Some(project_id) = self.remote_id() {
1848 self.client
1849 .send(proto::UpdateLanguageServer {
1850 project_id,
1851 language_server_id: language_server_id as u64,
1852 variant: Some(event),
1853 })
1854 .log_err();
1855 }
1856 }
1857
1858 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
1859 for (_, server) in self.language_servers.values() {
1860 server
1861 .notify::<lsp::notification::DidChangeConfiguration>(
1862 lsp::DidChangeConfigurationParams {
1863 settings: settings.clone(),
1864 },
1865 )
1866 .ok();
1867 }
1868 *self.language_server_settings.lock() = settings;
1869 }
1870
1871 pub fn language_server_statuses(
1872 &self,
1873 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
1874 self.language_server_statuses.values()
1875 }
1876
1877 pub fn update_diagnostics(
1878 &mut self,
1879 params: lsp::PublishDiagnosticsParams,
1880 disk_based_sources: &[&str],
1881 cx: &mut ModelContext<Self>,
1882 ) -> Result<()> {
1883 let abs_path = params
1884 .uri
1885 .to_file_path()
1886 .map_err(|_| anyhow!("URI is not a file"))?;
1887 let mut next_group_id = 0;
1888 let mut diagnostics = Vec::default();
1889 let mut primary_diagnostic_group_ids = HashMap::default();
1890 let mut sources_by_group_id = HashMap::default();
1891 let mut supporting_diagnostics = HashMap::default();
1892 for diagnostic in ¶ms.diagnostics {
1893 let source = diagnostic.source.as_ref();
1894 let code = diagnostic.code.as_ref().map(|code| match code {
1895 lsp::NumberOrString::Number(code) => code.to_string(),
1896 lsp::NumberOrString::String(code) => code.clone(),
1897 });
1898 let range = range_from_lsp(diagnostic.range);
1899 let is_supporting = diagnostic
1900 .related_information
1901 .as_ref()
1902 .map_or(false, |infos| {
1903 infos.iter().any(|info| {
1904 primary_diagnostic_group_ids.contains_key(&(
1905 source,
1906 code.clone(),
1907 range_from_lsp(info.location.range),
1908 ))
1909 })
1910 });
1911
1912 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
1913 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
1914 });
1915
1916 if is_supporting {
1917 supporting_diagnostics.insert(
1918 (source, code.clone(), range),
1919 (diagnostic.severity, is_unnecessary),
1920 );
1921 } else {
1922 let group_id = post_inc(&mut next_group_id);
1923 let is_disk_based = source.map_or(false, |source| {
1924 disk_based_sources.contains(&source.as_str())
1925 });
1926
1927 sources_by_group_id.insert(group_id, source);
1928 primary_diagnostic_group_ids
1929 .insert((source, code.clone(), range.clone()), group_id);
1930
1931 diagnostics.push(DiagnosticEntry {
1932 range,
1933 diagnostic: Diagnostic {
1934 code: code.clone(),
1935 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1936 message: diagnostic.message.clone(),
1937 group_id,
1938 is_primary: true,
1939 is_valid: true,
1940 is_disk_based,
1941 is_unnecessary,
1942 },
1943 });
1944 if let Some(infos) = &diagnostic.related_information {
1945 for info in infos {
1946 if info.location.uri == params.uri && !info.message.is_empty() {
1947 let range = range_from_lsp(info.location.range);
1948 diagnostics.push(DiagnosticEntry {
1949 range,
1950 diagnostic: Diagnostic {
1951 code: code.clone(),
1952 severity: DiagnosticSeverity::INFORMATION,
1953 message: info.message.clone(),
1954 group_id,
1955 is_primary: false,
1956 is_valid: true,
1957 is_disk_based,
1958 is_unnecessary: false,
1959 },
1960 });
1961 }
1962 }
1963 }
1964 }
1965 }
1966
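        // Second pass: patch the severity and `is_unnecessary` flag of non-primary
        // entries using the supporting diagnostics collected above.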
1967 for entry in &mut diagnostics {
1968 let diagnostic = &mut entry.diagnostic;
1969 if !diagnostic.is_primary {
1970 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1971 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
1972 source,
1973 diagnostic.code.clone(),
1974 entry.range.clone(),
1975 )) {
1976 if let Some(severity) = severity {
1977 diagnostic.severity = severity;
1978 }
1979 diagnostic.is_unnecessary = is_unnecessary;
1980 }
1981 }
1982 }
1983
1984 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1985 Ok(())
1986 }
1987
1988 pub fn update_diagnostic_entries(
1989 &mut self,
1990 abs_path: PathBuf,
1991 version: Option<i32>,
1992 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1993 cx: &mut ModelContext<Project>,
1994 ) -> Result<(), anyhow::Error> {
1995 let (worktree, relative_path) = self
1996 .find_local_worktree(&abs_path, cx)
1997 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1998 if !worktree.read(cx).is_visible() {
1999 return Ok(());
2000 }
2001
2002 let project_path = ProjectPath {
2003 worktree_id: worktree.read(cx).id(),
2004 path: relative_path.into(),
2005 };
2006 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2007 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2008 }
2009
2010 let updated = worktree.update(cx, |worktree, cx| {
2011 worktree
2012 .as_local_mut()
2013 .ok_or_else(|| anyhow!("not a local worktree"))?
2014 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2015 })?;
2016 if updated {
2017 cx.emit(Event::DiagnosticsUpdated(project_path));
2018 }
2019 Ok(())
2020 }
2021
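    // Applies newly-received diagnostics to an open buffer. Ranges are resolved against
    // the snapshot matching the LSP-reported document version, disk-based diagnostics are
    // mapped through any unsaved edits, and empty ranges are widened by one character.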
2022 fn update_buffer_diagnostics(
2023 &mut self,
2024 buffer: &ModelHandle<Buffer>,
2025 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2026 version: Option<i32>,
2027 cx: &mut ModelContext<Self>,
2028 ) -> Result<()> {
2029 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2030 Ordering::Equal
2031 .then_with(|| b.is_primary.cmp(&a.is_primary))
2032 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2033 .then_with(|| a.severity.cmp(&b.severity))
2034 .then_with(|| a.message.cmp(&b.message))
2035 }
2036
2037 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2038
2039 diagnostics.sort_unstable_by(|a, b| {
2040 Ordering::Equal
2041 .then_with(|| a.range.start.cmp(&b.range.start))
2042 .then_with(|| b.range.end.cmp(&a.range.end))
2043 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2044 });
2045
2046 let mut sanitized_diagnostics = Vec::new();
2047 let edits_since_save = Patch::new(
2048 snapshot
2049 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2050 .collect(),
2051 );
2052 for entry in diagnostics {
2053 let start;
2054 let end;
2055 if entry.diagnostic.is_disk_based {
2056 // Some diagnostics are based on files on disk instead of buffers'
2057 // current contents. Adjust these diagnostics' ranges to reflect
2058 // any unsaved edits.
2059 start = edits_since_save.old_to_new(entry.range.start);
2060 end = edits_since_save.old_to_new(entry.range.end);
2061 } else {
2062 start = entry.range.start;
2063 end = entry.range.end;
2064 }
2065
2066 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2067 ..snapshot.clip_point_utf16(end, Bias::Right);
2068
2069 // Expand empty ranges by one character
2070 if range.start == range.end {
2071 range.end.column += 1;
2072 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2073 if range.start == range.end && range.end.column > 0 {
2074 range.start.column -= 1;
2075 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2076 }
2077 }
2078
2079 sanitized_diagnostics.push(DiagnosticEntry {
2080 range,
2081 diagnostic: entry.diagnostic,
2082 });
2083 }
2084 drop(edits_since_save);
2085
2086 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2087 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2088 Ok(())
2089 }
2090
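    // Reloads any of the given buffers that are dirty. Remote buffers are reloaded on the
    // host via a single `ReloadBuffers` request; local buffers are reloaded directly, and
    // every resulting transaction is collected into one `ProjectTransaction`.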
2091 pub fn reload_buffers(
2092 &self,
2093 buffers: HashSet<ModelHandle<Buffer>>,
2094 push_to_history: bool,
2095 cx: &mut ModelContext<Self>,
2096 ) -> Task<Result<ProjectTransaction>> {
2097 let mut local_buffers = Vec::new();
2098 let mut remote_buffers = None;
2099 for buffer_handle in buffers {
2100 let buffer = buffer_handle.read(cx);
2101 if buffer.is_dirty() {
2102 if let Some(file) = File::from_dyn(buffer.file()) {
2103 if file.is_local() {
2104 local_buffers.push(buffer_handle);
2105 } else {
2106 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2107 }
2108 }
2109 }
2110 }
2111
2112 let remote_buffers = self.remote_id().zip(remote_buffers);
2113 let client = self.client.clone();
2114
2115 cx.spawn(|this, mut cx| async move {
2116 let mut project_transaction = ProjectTransaction::default();
2117
2118 if let Some((project_id, remote_buffers)) = remote_buffers {
2119 let response = client
2120 .request(proto::ReloadBuffers {
2121 project_id,
2122 buffer_ids: remote_buffers
2123 .iter()
2124 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2125 .collect(),
2126 })
2127 .await?
2128 .transaction
2129 .ok_or_else(|| anyhow!("missing transaction"))?;
2130 project_transaction = this
2131 .update(&mut cx, |this, cx| {
2132 this.deserialize_project_transaction(response, push_to_history, cx)
2133 })
2134 .await?;
2135 }
2136
2137 for buffer in local_buffers {
2138 let transaction = buffer
2139 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2140 .await?;
2141 buffer.update(&mut cx, |buffer, cx| {
2142 if let Some(transaction) = transaction {
2143 if !push_to_history {
2144 buffer.forget_transaction(transaction.id);
2145 }
2146 project_transaction.0.insert(cx.handle(), transaction);
2147 }
2148 });
2149 }
2150
2151 Ok(project_transaction)
2152 })
2153 }
2154
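    // Formats the given buffers. Remote buffers are formatted on the host via a
    // `FormatBuffers` request; local buffers are formatted through their language server,
    // preferring whole-document formatting and falling back to range formatting over the
    // entire buffer when only that capability is advertised.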
2155 pub fn format(
2156 &self,
2157 buffers: HashSet<ModelHandle<Buffer>>,
2158 push_to_history: bool,
2159 cx: &mut ModelContext<Project>,
2160 ) -> Task<Result<ProjectTransaction>> {
2161 let mut local_buffers = Vec::new();
2162 let mut remote_buffers = None;
2163 for buffer_handle in buffers {
2164 let buffer = buffer_handle.read(cx);
2165 if let Some(file) = File::from_dyn(buffer.file()) {
2166 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2167 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2168 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2169 }
2170 } else {
2171 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2172 }
2173 } else {
2174 return Task::ready(Ok(Default::default()));
2175 }
2176 }
2177
2178 let remote_buffers = self.remote_id().zip(remote_buffers);
2179 let client = self.client.clone();
2180
2181 cx.spawn(|this, mut cx| async move {
2182 let mut project_transaction = ProjectTransaction::default();
2183
2184 if let Some((project_id, remote_buffers)) = remote_buffers {
2185 let response = client
2186 .request(proto::FormatBuffers {
2187 project_id,
2188 buffer_ids: remote_buffers
2189 .iter()
2190 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2191 .collect(),
2192 })
2193 .await?
2194 .transaction
2195 .ok_or_else(|| anyhow!("missing transaction"))?;
2196 project_transaction = this
2197 .update(&mut cx, |this, cx| {
2198 this.deserialize_project_transaction(response, push_to_history, cx)
2199 })
2200 .await?;
2201 }
2202
2203 for (buffer, buffer_abs_path, language_server) in local_buffers {
2204 let text_document = lsp::TextDocumentIdentifier::new(
2205 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2206 );
2207 let capabilities = &language_server.capabilities();
2208 let tab_size = cx.update(|cx| {
2209 let language_name = buffer.read(cx).language().map(|language| language.name());
2210 cx.global::<Settings>().tab_size(language_name.as_deref())
2211 });
2212 let lsp_edits = if capabilities
2213 .document_formatting_provider
2214 .as_ref()
2215 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2216 {
2217 language_server
2218 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2219 text_document,
2220 options: lsp::FormattingOptions {
2221 tab_size,
2222 insert_spaces: true,
2223 insert_final_newline: Some(true),
2224 ..Default::default()
2225 },
2226 work_done_progress_params: Default::default(),
2227 })
2228 .await?
2229 } else if capabilities
2230 .document_range_formatting_provider
2231 .as_ref()
2232 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2233 {
2234 let buffer_start = lsp::Position::new(0, 0);
2235 let buffer_end =
2236 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2237 language_server
2238 .request::<lsp::request::RangeFormatting>(
2239 lsp::DocumentRangeFormattingParams {
2240 text_document,
2241 range: lsp::Range::new(buffer_start, buffer_end),
2242 options: lsp::FormattingOptions {
                                tab_size,
2244 insert_spaces: true,
2245 insert_final_newline: Some(true),
2246 ..Default::default()
2247 },
2248 work_done_progress_params: Default::default(),
2249 },
2250 )
2251 .await?
2252 } else {
2253 continue;
2254 };
2255
2256 if let Some(lsp_edits) = lsp_edits {
2257 let edits = this
2258 .update(&mut cx, |this, cx| {
2259 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2260 })
2261 .await?;
2262 buffer.update(&mut cx, |buffer, cx| {
2263 buffer.finalize_last_transaction();
2264 buffer.start_transaction();
2265 for (range, text) in edits {
2266 buffer.edit([(range, text)], cx);
2267 }
2268 if buffer.end_transaction(cx).is_some() {
2269 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2270 if !push_to_history {
2271 buffer.forget_transaction(transaction.id);
2272 }
2273 project_transaction.0.insert(cx.handle(), transaction);
2274 }
2275 });
2276 }
2277 }
2278
2279 Ok(project_transaction)
2280 })
2281 }
2282
2283 pub fn definition<T: ToPointUtf16>(
2284 &self,
2285 buffer: &ModelHandle<Buffer>,
2286 position: T,
2287 cx: &mut ModelContext<Self>,
2288 ) -> Task<Result<Vec<Location>>> {
2289 let position = position.to_point_utf16(buffer.read(cx));
2290 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2291 }
2292
2293 pub fn references<T: ToPointUtf16>(
2294 &self,
2295 buffer: &ModelHandle<Buffer>,
2296 position: T,
2297 cx: &mut ModelContext<Self>,
2298 ) -> Task<Result<Vec<Location>>> {
2299 let position = position.to_point_utf16(buffer.read(cx));
2300 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2301 }
2302
2303 pub fn document_highlights<T: ToPointUtf16>(
2304 &self,
2305 buffer: &ModelHandle<Buffer>,
2306 position: T,
2307 cx: &mut ModelContext<Self>,
2308 ) -> Task<Result<Vec<DocumentHighlight>>> {
2309 let position = position.to_point_utf16(buffer.read(cx));
2310
2311 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2312 }
2313
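    // Queries every running language server for workspace symbols matching `query`. For a
    // local project, responses are resolved back to project paths (falling back to a path
    // relative to the worktree for symbols outside any open worktree); for a remote
    // project, the request is forwarded to the host.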
2314 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2315 if self.is_local() {
2316 let mut requests = Vec::new();
2317 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2318 let worktree_id = *worktree_id;
2319 if let Some(worktree) = self
2320 .worktree_for_id(worktree_id, cx)
2321 .and_then(|worktree| worktree.read(cx).as_local())
2322 {
2323 let lsp_adapter = lsp_adapter.clone();
2324 let worktree_abs_path = worktree.abs_path().clone();
2325 requests.push(
2326 language_server
2327 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2328 query: query.to_string(),
2329 ..Default::default()
2330 })
2331 .log_err()
2332 .map(move |response| {
2333 (
2334 lsp_adapter,
2335 worktree_id,
2336 worktree_abs_path,
2337 response.unwrap_or_default(),
2338 )
2339 }),
2340 );
2341 }
2342 }
2343
2344 cx.spawn_weak(|this, cx| async move {
2345 let responses = futures::future::join_all(requests).await;
2346 let this = if let Some(this) = this.upgrade(&cx) {
2347 this
2348 } else {
2349 return Ok(Default::default());
2350 };
2351 this.read_with(&cx, |this, cx| {
2352 let mut symbols = Vec::new();
2353 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2354 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2355 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2356 let mut worktree_id = source_worktree_id;
2357 let path;
2358 if let Some((worktree, rel_path)) =
2359 this.find_local_worktree(&abs_path, cx)
2360 {
2361 worktree_id = worktree.read(cx).id();
2362 path = rel_path;
2363 } else {
2364 path = relativize_path(&worktree_abs_path, &abs_path);
2365 }
2366
2367 let label = this
2368 .languages
2369 .select_language(&path)
2370 .and_then(|language| {
2371 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2372 })
2373 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2374 let signature = this.symbol_signature(worktree_id, &path);
2375
2376 Some(Symbol {
2377 source_worktree_id,
2378 worktree_id,
2379 language_server_name: adapter.name(),
2380 name: lsp_symbol.name,
2381 kind: lsp_symbol.kind,
2382 label,
2383 path,
2384 range: range_from_lsp(lsp_symbol.location.range),
2385 signature,
2386 })
2387 }));
2388 }
2389 Ok(symbols)
2390 })
2391 })
2392 } else if let Some(project_id) = self.remote_id() {
2393 let request = self.client.request(proto::GetProjectSymbols {
2394 project_id,
2395 query: query.to_string(),
2396 });
2397 cx.spawn_weak(|this, cx| async move {
2398 let response = request.await?;
2399 let mut symbols = Vec::new();
2400 if let Some(this) = this.upgrade(&cx) {
2401 this.read_with(&cx, |this, _| {
2402 symbols.extend(
2403 response
2404 .symbols
2405 .into_iter()
2406 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2407 );
2408 })
2409 }
2410 Ok(symbols)
2411 })
2412 } else {
2413 Task::ready(Ok(Default::default()))
2414 }
2415 }
2416
2417 pub fn open_buffer_for_symbol(
2418 &mut self,
2419 symbol: &Symbol,
2420 cx: &mut ModelContext<Self>,
2421 ) -> Task<Result<ModelHandle<Buffer>>> {
2422 if self.is_local() {
2423 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2424 symbol.source_worktree_id,
2425 symbol.language_server_name.clone(),
2426 )) {
2427 server.clone()
2428 } else {
2429 return Task::ready(Err(anyhow!(
2430 "language server for worktree and language not found"
2431 )));
2432 };
2433
2434 let worktree_abs_path = if let Some(worktree_abs_path) = self
2435 .worktree_for_id(symbol.worktree_id, cx)
2436 .and_then(|worktree| worktree.read(cx).as_local())
2437 .map(|local_worktree| local_worktree.abs_path())
2438 {
2439 worktree_abs_path
2440 } else {
2441 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2442 };
2443 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2444 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2445 uri
2446 } else {
2447 return Task::ready(Err(anyhow!("invalid symbol path")));
2448 };
2449
2450 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2451 } else if let Some(project_id) = self.remote_id() {
2452 let request = self.client.request(proto::OpenBufferForSymbol {
2453 project_id,
2454 symbol: Some(serialize_symbol(symbol)),
2455 });
2456 cx.spawn(|this, mut cx| async move {
2457 let response = request.await?;
2458 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2459 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2460 .await
2461 })
2462 } else {
2463 Task::ready(Err(anyhow!("project does not have a remote id")))
2464 }
2465 }
2466
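    // Requests completions at the given position. Local buffers query the buffer's
    // language server directly; remote buffers issue a `GetCompletions` request and wait
    // for the buffer to catch up to the returned version before deserializing the results.
    // Completions whose edit range disagrees with the current buffer contents are dropped.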
2467 pub fn completions<T: ToPointUtf16>(
2468 &self,
2469 source_buffer_handle: &ModelHandle<Buffer>,
2470 position: T,
2471 cx: &mut ModelContext<Self>,
2472 ) -> Task<Result<Vec<Completion>>> {
2473 let source_buffer_handle = source_buffer_handle.clone();
2474 let source_buffer = source_buffer_handle.read(cx);
2475 let buffer_id = source_buffer.remote_id();
2476 let language = source_buffer.language().cloned();
2477 let worktree;
2478 let buffer_abs_path;
2479 if let Some(file) = File::from_dyn(source_buffer.file()) {
2480 worktree = file.worktree.clone();
2481 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2482 } else {
2483 return Task::ready(Ok(Default::default()));
2484 };
2485
2486 let position = position.to_point_utf16(source_buffer);
2487 let anchor = source_buffer.anchor_after(position);
2488
2489 if worktree.read(cx).as_local().is_some() {
2490 let buffer_abs_path = buffer_abs_path.unwrap();
2491 let (_, lang_server) =
2492 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2493 server.clone()
2494 } else {
2495 return Task::ready(Ok(Default::default()));
2496 };
2497
2498 cx.spawn(|_, cx| async move {
2499 let completions = lang_server
2500 .request::<lsp::request::Completion>(lsp::CompletionParams {
2501 text_document_position: lsp::TextDocumentPositionParams::new(
2502 lsp::TextDocumentIdentifier::new(
2503 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2504 ),
2505 point_to_lsp(position),
2506 ),
2507 context: Default::default(),
2508 work_done_progress_params: Default::default(),
2509 partial_result_params: Default::default(),
2510 })
2511 .await
2512 .context("lsp completion request failed")?;
2513
2514 let completions = if let Some(completions) = completions {
2515 match completions {
2516 lsp::CompletionResponse::Array(completions) => completions,
2517 lsp::CompletionResponse::List(list) => list.items,
2518 }
2519 } else {
2520 Default::default()
2521 };
2522
2523 source_buffer_handle.read_with(&cx, |this, _| {
2524 let snapshot = this.snapshot();
2525 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2526 let mut range_for_token = None;
2527 Ok(completions
2528 .into_iter()
2529 .filter_map(|lsp_completion| {
2530 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2531 // If the language server provides a range to overwrite, then
2532 // check that the range is valid.
2533 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2534 let range = range_from_lsp(edit.range);
2535 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2536 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2537 if start != range.start || end != range.end {
2538 log::info!("completion out of expected range");
2539 return None;
2540 }
2541 (
2542 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2543 edit.new_text.clone(),
2544 )
2545 }
2546 // If the language server does not provide a range, then infer
2547 // the range based on the syntax tree.
2548 None => {
2549 if position != clipped_position {
2550 log::info!("completion out of expected range");
2551 return None;
2552 }
2553 let Range { start, end } = range_for_token
2554 .get_or_insert_with(|| {
2555 let offset = position.to_offset(&snapshot);
2556 snapshot
2557 .range_for_word_token_at(offset)
2558 .unwrap_or_else(|| offset..offset)
2559 })
2560 .clone();
2561 let text = lsp_completion
2562 .insert_text
2563 .as_ref()
2564 .unwrap_or(&lsp_completion.label)
2565 .clone();
2566 (
2567 snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                    text,
2569 )
2570 }
2571 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2572 log::info!("unsupported insert/replace completion");
2573 return None;
2574 }
2575 };
2576
2577 Some(Completion {
2578 old_range,
2579 new_text,
2580 label: language
2581 .as_ref()
2582 .and_then(|l| l.label_for_completion(&lsp_completion))
2583 .unwrap_or_else(|| {
2584 CodeLabel::plain(
2585 lsp_completion.label.clone(),
2586 lsp_completion.filter_text.as_deref(),
2587 )
2588 }),
2589 lsp_completion,
2590 })
2591 })
2592 .collect())
2593 })
2594 })
2595 } else if let Some(project_id) = self.remote_id() {
2596 let rpc = self.client.clone();
2597 let message = proto::GetCompletions {
2598 project_id,
2599 buffer_id,
2600 position: Some(language::proto::serialize_anchor(&anchor)),
2601 version: serialize_version(&source_buffer.version()),
2602 };
2603 cx.spawn_weak(|_, mut cx| async move {
2604 let response = rpc.request(message).await?;
2605
2606 source_buffer_handle
2607 .update(&mut cx, |buffer, _| {
2608 buffer.wait_for_version(deserialize_version(response.version))
2609 })
2610 .await;
2611
2612 response
2613 .completions
2614 .into_iter()
2615 .map(|completion| {
2616 language::proto::deserialize_completion(completion, language.as_ref())
2617 })
2618 .collect()
2619 })
2620 } else {
2621 Task::ready(Ok(Default::default()))
2622 }
2623 }
2624
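    // Resolves a completion item and applies any additional text edits it carries,
    // returning the resulting transaction, if any.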
2625 pub fn apply_additional_edits_for_completion(
2626 &self,
2627 buffer_handle: ModelHandle<Buffer>,
2628 completion: Completion,
2629 push_to_history: bool,
2630 cx: &mut ModelContext<Self>,
2631 ) -> Task<Result<Option<Transaction>>> {
2632 let buffer = buffer_handle.read(cx);
2633 let buffer_id = buffer.remote_id();
2634
2635 if self.is_local() {
2636 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2637 {
2638 server.clone()
2639 } else {
2640 return Task::ready(Ok(Default::default()));
2641 };
2642
2643 cx.spawn(|this, mut cx| async move {
2644 let resolved_completion = lang_server
2645 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2646 .await?;
2647 if let Some(edits) = resolved_completion.additional_text_edits {
2648 let edits = this
2649 .update(&mut cx, |this, cx| {
2650 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2651 })
2652 .await?;
2653 buffer_handle.update(&mut cx, |buffer, cx| {
2654 buffer.finalize_last_transaction();
2655 buffer.start_transaction();
2656 for (range, text) in edits {
2657 buffer.edit([(range, text)], cx);
2658 }
2659 let transaction = if buffer.end_transaction(cx).is_some() {
2660 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2661 if !push_to_history {
2662 buffer.forget_transaction(transaction.id);
2663 }
2664 Some(transaction)
2665 } else {
2666 None
2667 };
2668 Ok(transaction)
2669 })
2670 } else {
2671 Ok(None)
2672 }
2673 })
2674 } else if let Some(project_id) = self.remote_id() {
2675 let client = self.client.clone();
2676 cx.spawn(|_, mut cx| async move {
2677 let response = client
2678 .request(proto::ApplyCompletionAdditionalEdits {
2679 project_id,
2680 buffer_id,
2681 completion: Some(language::proto::serialize_completion(&completion)),
2682 })
2683 .await?;
2684
2685 if let Some(transaction) = response.transaction {
2686 let transaction = language::proto::deserialize_transaction(transaction)?;
2687 buffer_handle
2688 .update(&mut cx, |buffer, _| {
2689 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2690 })
2691 .await;
2692 if push_to_history {
2693 buffer_handle.update(&mut cx, |buffer, _| {
2694 buffer.push_transaction(transaction.clone(), Instant::now());
2695 });
2696 }
2697 Ok(Some(transaction))
2698 } else {
2699 Ok(None)
2700 }
2701 })
2702 } else {
2703 Task::ready(Err(anyhow!("project does not have a remote id")))
2704 }
2705 }
2706
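    // Fetches the code actions available for the given range, passing along the
    // diagnostics that overlap it. Only plain code actions are returned; bare commands
    // are filtered out.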
2707 pub fn code_actions<T: Clone + ToOffset>(
2708 &self,
2709 buffer_handle: &ModelHandle<Buffer>,
2710 range: Range<T>,
2711 cx: &mut ModelContext<Self>,
2712 ) -> Task<Result<Vec<CodeAction>>> {
2713 let buffer_handle = buffer_handle.clone();
2714 let buffer = buffer_handle.read(cx);
2715 let snapshot = buffer.snapshot();
2716 let relevant_diagnostics = snapshot
2717 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2718 .map(|entry| entry.to_lsp_diagnostic_stub())
2719 .collect();
2720 let buffer_id = buffer.remote_id();
2721 let worktree;
2722 let buffer_abs_path;
2723 if let Some(file) = File::from_dyn(buffer.file()) {
2724 worktree = file.worktree.clone();
2725 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2726 } else {
2727 return Task::ready(Ok(Default::default()));
2728 };
2729 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2730
2731 if worktree.read(cx).as_local().is_some() {
2732 let buffer_abs_path = buffer_abs_path.unwrap();
2733 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2734 {
2735 server.clone()
2736 } else {
2737 return Task::ready(Ok(Default::default()));
2738 };
2739
2740 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2741 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2743 return Ok(Default::default());
2744 }
2745
2746 Ok(lang_server
2747 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2748 text_document: lsp::TextDocumentIdentifier::new(
2749 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2750 ),
2751 range: lsp_range,
2752 work_done_progress_params: Default::default(),
2753 partial_result_params: Default::default(),
2754 context: lsp::CodeActionContext {
2755 diagnostics: relevant_diagnostics,
2756 only: Some(vec![
2757 lsp::CodeActionKind::QUICKFIX,
2758 lsp::CodeActionKind::REFACTOR,
2759 lsp::CodeActionKind::REFACTOR_EXTRACT,
2760 lsp::CodeActionKind::SOURCE,
2761 ]),
2762 },
2763 })
2764 .await?
2765 .unwrap_or_default()
2766 .into_iter()
2767 .filter_map(|entry| {
2768 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2769 Some(CodeAction {
2770 range: range.clone(),
2771 lsp_action,
2772 })
2773 } else {
2774 None
2775 }
2776 })
2777 .collect())
2778 })
2779 } else if let Some(project_id) = self.remote_id() {
2780 let rpc = self.client.clone();
2781 let version = buffer.version();
2782 cx.spawn_weak(|_, mut cx| async move {
2783 let response = rpc
2784 .request(proto::GetCodeActions {
2785 project_id,
2786 buffer_id,
2787 start: Some(language::proto::serialize_anchor(&range.start)),
2788 end: Some(language::proto::serialize_anchor(&range.end)),
2789 version: serialize_version(&version),
2790 })
2791 .await?;
2792
2793 buffer_handle
2794 .update(&mut cx, |buffer, _| {
2795 buffer.wait_for_version(deserialize_version(response.version))
2796 })
2797 .await;
2798
2799 response
2800 .actions
2801 .into_iter()
2802 .map(language::proto::deserialize_code_action)
2803 .collect()
2804 })
2805 } else {
2806 Task::ready(Ok(Default::default()))
2807 }
2808 }
2809
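    // Applies a code action. The action is first refreshed (resolved through the server
    // when possible, otherwise re-requested by title), then its workspace edit is applied.
    // Actions that only carry a command are executed via `workspace/executeCommand`, and
    // any edits the server pushed back are returned from the stash populated by
    // `on_lsp_workspace_edit`.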
2810 pub fn apply_code_action(
2811 &self,
2812 buffer_handle: ModelHandle<Buffer>,
2813 mut action: CodeAction,
2814 push_to_history: bool,
2815 cx: &mut ModelContext<Self>,
2816 ) -> Task<Result<ProjectTransaction>> {
2817 if self.is_local() {
2818 let buffer = buffer_handle.read(cx);
2819 let (lsp_adapter, lang_server) =
2820 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2821 server.clone()
2822 } else {
2823 return Task::ready(Ok(Default::default()));
2824 };
2825 let range = action.range.to_point_utf16(buffer);
2826
2827 cx.spawn(|this, mut cx| async move {
2828 if let Some(lsp_range) = action
2829 .lsp_action
2830 .data
2831 .as_mut()
2832 .and_then(|d| d.get_mut("codeActionParams"))
2833 .and_then(|d| d.get_mut("range"))
2834 {
2835 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2836 action.lsp_action = lang_server
2837 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2838 .await?;
2839 } else {
2840 let actions = this
2841 .update(&mut cx, |this, cx| {
2842 this.code_actions(&buffer_handle, action.range, cx)
2843 })
2844 .await?;
2845 action.lsp_action = actions
2846 .into_iter()
2847 .find(|a| a.lsp_action.title == action.lsp_action.title)
2848 .ok_or_else(|| anyhow!("code action is outdated"))?
2849 .lsp_action;
2850 }
2851
2852 if let Some(edit) = action.lsp_action.edit {
2853 Self::deserialize_workspace_edit(
2854 this,
2855 edit,
2856 push_to_history,
2857 lsp_adapter,
2858 lang_server,
2859 &mut cx,
2860 )
2861 .await
2862 } else if let Some(command) = action.lsp_action.command {
2863 this.update(&mut cx, |this, _| {
2864 this.last_workspace_edits_by_language_server
2865 .remove(&lang_server.server_id());
2866 });
2867 lang_server
2868 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
2869 command: command.command,
2870 arguments: command.arguments.unwrap_or_default(),
2871 ..Default::default()
2872 })
2873 .await?;
2874 Ok(this.update(&mut cx, |this, _| {
2875 this.last_workspace_edits_by_language_server
2876 .remove(&lang_server.server_id())
2877 .unwrap_or_default()
2878 }))
2879 } else {
2880 Ok(ProjectTransaction::default())
2881 }
2882 })
2883 } else if let Some(project_id) = self.remote_id() {
2884 let client = self.client.clone();
2885 let request = proto::ApplyCodeAction {
2886 project_id,
2887 buffer_id: buffer_handle.read(cx).remote_id(),
2888 action: Some(language::proto::serialize_code_action(&action)),
2889 };
2890 cx.spawn(|this, mut cx| async move {
2891 let response = client
2892 .request(request)
2893 .await?
2894 .transaction
2895 .ok_or_else(|| anyhow!("missing transaction"))?;
2896 this.update(&mut cx, |this, cx| {
2897 this.deserialize_project_transaction(response, push_to_history, cx)
2898 })
2899 .await
2900 })
2901 } else {
2902 Task::ready(Err(anyhow!("project does not have a remote id")))
2903 }
2904 }
2905
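    // Applies an LSP `WorkspaceEdit` to the project: resource operations (create, rename,
    // delete) go through the `Fs`, and text edits are applied to buffers opened via the
    // language server, producing a `ProjectTransaction` covering every edited buffer.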
2906 async fn deserialize_workspace_edit(
2907 this: ModelHandle<Self>,
2908 edit: lsp::WorkspaceEdit,
2909 push_to_history: bool,
2910 lsp_adapter: Arc<dyn LspAdapter>,
2911 language_server: Arc<LanguageServer>,
2912 cx: &mut AsyncAppContext,
2913 ) -> Result<ProjectTransaction> {
2914 let fs = this.read_with(cx, |this, _| this.fs.clone());
2915 let mut operations = Vec::new();
2916 if let Some(document_changes) = edit.document_changes {
2917 match document_changes {
2918 lsp::DocumentChanges::Edits(edits) => {
2919 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2920 }
2921 lsp::DocumentChanges::Operations(ops) => operations = ops,
2922 }
2923 } else if let Some(changes) = edit.changes {
2924 operations.extend(changes.into_iter().map(|(uri, edits)| {
2925 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2926 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2927 uri,
2928 version: None,
2929 },
2930 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2931 })
2932 }));
2933 }
2934
2935 let mut project_transaction = ProjectTransaction::default();
2936 for operation in operations {
2937 match operation {
2938 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2939 let abs_path = op
2940 .uri
2941 .to_file_path()
2942 .map_err(|_| anyhow!("can't convert URI to path"))?;
2943
2944 if let Some(parent_path) = abs_path.parent() {
2945 fs.create_dir(parent_path).await?;
2946 }
2947 if abs_path.ends_with("/") {
2948 fs.create_dir(&abs_path).await?;
2949 } else {
2950 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2951 .await?;
2952 }
2953 }
2954 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2955 let source_abs_path = op
2956 .old_uri
2957 .to_file_path()
2958 .map_err(|_| anyhow!("can't convert URI to path"))?;
2959 let target_abs_path = op
2960 .new_uri
2961 .to_file_path()
2962 .map_err(|_| anyhow!("can't convert URI to path"))?;
2963 fs.rename(
2964 &source_abs_path,
2965 &target_abs_path,
2966 op.options.map(Into::into).unwrap_or_default(),
2967 )
2968 .await?;
2969 }
2970 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2971 let abs_path = op
2972 .uri
2973 .to_file_path()
2974 .map_err(|_| anyhow!("can't convert URI to path"))?;
2975 let options = op.options.map(Into::into).unwrap_or_default();
2976 if abs_path.ends_with("/") {
2977 fs.remove_dir(&abs_path, options).await?;
2978 } else {
2979 fs.remove_file(&abs_path, options).await?;
2980 }
2981 }
2982 lsp::DocumentChangeOperation::Edit(op) => {
2983 let buffer_to_edit = this
2984 .update(cx, |this, cx| {
2985 this.open_local_buffer_via_lsp(
2986 op.text_document.uri,
2987 lsp_adapter.clone(),
2988 language_server.clone(),
2989 cx,
2990 )
2991 })
2992 .await?;
2993
2994 let edits = this
2995 .update(cx, |this, cx| {
2996 let edits = op.edits.into_iter().map(|edit| match edit {
2997 lsp::OneOf::Left(edit) => edit,
2998 lsp::OneOf::Right(edit) => edit.text_edit,
2999 });
3000 this.edits_from_lsp(
3001 &buffer_to_edit,
3002 edits,
3003 op.text_document.version,
3004 cx,
3005 )
3006 })
3007 .await?;
3008
3009 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3010 buffer.finalize_last_transaction();
3011 buffer.start_transaction();
3012 for (range, text) in edits {
3013 buffer.edit([(range, text)], cx);
3014 }
3015 let transaction = if buffer.end_transaction(cx).is_some() {
3016 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3017 if !push_to_history {
3018 buffer.forget_transaction(transaction.id);
3019 }
3020 Some(transaction)
3021 } else {
3022 None
3023 };
3024
3025 transaction
3026 });
3027 if let Some(transaction) = transaction {
3028 project_transaction.0.insert(buffer_to_edit, transaction);
3029 }
3030 }
3031 }
3032 }
3033
3034 Ok(project_transaction)
3035 }
3036
3037 pub fn prepare_rename<T: ToPointUtf16>(
3038 &self,
3039 buffer: ModelHandle<Buffer>,
3040 position: T,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Task<Result<Option<Range<Anchor>>>> {
3043 let position = position.to_point_utf16(buffer.read(cx));
3044 self.request_lsp(buffer, PrepareRename { position }, cx)
3045 }
3046
3047 pub fn perform_rename<T: ToPointUtf16>(
3048 &self,
3049 buffer: ModelHandle<Buffer>,
3050 position: T,
3051 new_name: String,
3052 push_to_history: bool,
3053 cx: &mut ModelContext<Self>,
3054 ) -> Task<Result<ProjectTransaction>> {
3055 let position = position.to_point_utf16(buffer.read(cx));
3056 self.request_lsp(
3057 buffer,
3058 PerformRename {
3059 position,
3060 new_name,
3061 push_to_history,
3062 },
3063 cx,
3064 )
3065 }
3066
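    // Searches the project for `query`. Locally, visible worktree paths are split across a
    // pool of background workers that scan files on disk for candidate matches; candidate
    // buffers (plus all already-open buffers) are then searched in memory and matching
    // ranges are returned per buffer. Remotely, the query is forwarded to the host.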
3067 pub fn search(
3068 &self,
3069 query: SearchQuery,
3070 cx: &mut ModelContext<Self>,
3071 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3072 if self.is_local() {
3073 let snapshots = self
3074 .visible_worktrees(cx)
3075 .filter_map(|tree| {
3076 let tree = tree.read(cx).as_local()?;
3077 Some(tree.snapshot())
3078 })
3079 .collect::<Vec<_>>();
3080
3081 let background = cx.background().clone();
3082 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3083 if path_count == 0 {
3084 return Task::ready(Ok(Default::default()));
3085 }
3086 let workers = background.num_cpus().min(path_count);
3087 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3088 cx.background()
3089 .spawn({
3090 let fs = self.fs.clone();
3091 let background = cx.background().clone();
3092 let query = query.clone();
3093 async move {
3094 let fs = &fs;
3095 let query = &query;
3096 let matching_paths_tx = &matching_paths_tx;
3097 let paths_per_worker = (path_count + workers - 1) / workers;
3098 let snapshots = &snapshots;
3099 background
3100 .scoped(|scope| {
3101 for worker_ix in 0..workers {
3102 let worker_start_ix = worker_ix * paths_per_worker;
3103 let worker_end_ix = worker_start_ix + paths_per_worker;
3104 scope.spawn(async move {
3105 let mut snapshot_start_ix = 0;
3106 let mut abs_path = PathBuf::new();
3107 for snapshot in snapshots {
3108 let snapshot_end_ix =
3109 snapshot_start_ix + snapshot.visible_file_count();
3110 if worker_end_ix <= snapshot_start_ix {
3111 break;
3112 } else if worker_start_ix > snapshot_end_ix {
3113 snapshot_start_ix = snapshot_end_ix;
3114 continue;
3115 } else {
3116 let start_in_snapshot = worker_start_ix
3117 .saturating_sub(snapshot_start_ix);
3118 let end_in_snapshot =
3119 cmp::min(worker_end_ix, snapshot_end_ix)
3120 - snapshot_start_ix;
3121
3122 for entry in snapshot
3123 .files(false, start_in_snapshot)
3124 .take(end_in_snapshot - start_in_snapshot)
3125 {
3126 if matching_paths_tx.is_closed() {
3127 break;
3128 }
3129
3130 abs_path.clear();
3131 abs_path.push(&snapshot.abs_path());
3132 abs_path.push(&entry.path);
3133 let matches = if let Some(file) =
3134 fs.open_sync(&abs_path).await.log_err()
3135 {
3136 query.detect(file).unwrap_or(false)
3137 } else {
3138 false
3139 };
3140
3141 if matches {
3142 let project_path =
3143 (snapshot.id(), entry.path.clone());
3144 if matching_paths_tx
3145 .send(project_path)
3146 .await
3147 .is_err()
3148 {
3149 break;
3150 }
3151 }
3152 }
3153
3154 snapshot_start_ix = snapshot_end_ix;
3155 }
3156 }
3157 });
3158 }
3159 })
3160 .await;
3161 }
3162 })
3163 .detach();
3164
3165 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3166 let open_buffers = self
3167 .opened_buffers
3168 .values()
3169 .filter_map(|b| b.upgrade(cx))
3170 .collect::<HashSet<_>>();
3171 cx.spawn(|this, cx| async move {
3172 for buffer in &open_buffers {
3173 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3174 buffers_tx.send((buffer.clone(), snapshot)).await?;
3175 }
3176
3177 let open_buffers = Rc::new(RefCell::new(open_buffers));
3178 while let Some(project_path) = matching_paths_rx.next().await {
3179 if buffers_tx.is_closed() {
3180 break;
3181 }
3182
3183 let this = this.clone();
3184 let open_buffers = open_buffers.clone();
3185 let buffers_tx = buffers_tx.clone();
3186 cx.spawn(|mut cx| async move {
3187 if let Some(buffer) = this
3188 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3189 .await
3190 .log_err()
3191 {
3192 if open_buffers.borrow_mut().insert(buffer.clone()) {
3193 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3194 buffers_tx.send((buffer, snapshot)).await?;
3195 }
3196 }
3197
3198 Ok::<_, anyhow::Error>(())
3199 })
3200 .detach();
3201 }
3202
3203 Ok::<_, anyhow::Error>(())
3204 })
3205 .detach_and_log_err(cx);
3206
3207 let background = cx.background().clone();
3208 cx.background().spawn(async move {
3209 let query = &query;
3210 let mut matched_buffers = Vec::new();
3211 for _ in 0..workers {
3212 matched_buffers.push(HashMap::default());
3213 }
3214 background
3215 .scoped(|scope| {
3216 for worker_matched_buffers in matched_buffers.iter_mut() {
3217 let mut buffers_rx = buffers_rx.clone();
3218 scope.spawn(async move {
3219 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3220 let buffer_matches = query
3221 .search(snapshot.as_rope())
3222 .await
3223 .iter()
3224 .map(|range| {
3225 snapshot.anchor_before(range.start)
3226 ..snapshot.anchor_after(range.end)
3227 })
3228 .collect::<Vec<_>>();
3229 if !buffer_matches.is_empty() {
3230 worker_matched_buffers
3231 .insert(buffer.clone(), buffer_matches);
3232 }
3233 }
3234 });
3235 }
3236 })
3237 .await;
3238 Ok(matched_buffers.into_iter().flatten().collect())
3239 })
3240 } else if let Some(project_id) = self.remote_id() {
3241 let request = self.client.request(query.to_proto(project_id));
3242 cx.spawn(|this, mut cx| async move {
3243 let response = request.await?;
3244 let mut result = HashMap::default();
3245 for location in response.locations {
3246 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3247 let target_buffer = this
3248 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3249 .await?;
3250 let start = location
3251 .start
3252 .and_then(deserialize_anchor)
3253 .ok_or_else(|| anyhow!("missing target start"))?;
3254 let end = location
3255 .end
3256 .and_then(deserialize_anchor)
3257 .ok_or_else(|| anyhow!("missing target end"))?;
3258 result
3259 .entry(target_buffer)
3260 .or_insert(Vec::new())
3261 .push(start..end)
3262 }
3263 Ok(result)
3264 })
3265 } else {
3266 Task::ready(Ok(Default::default()))
3267 }
3268 }
3269
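    // Issues an LSP-backed request that can be served either by the buffer's local
    // language server or, for remote projects, by forwarding the equivalent proto request
    // to the host. Returns a default response when no server is available or the server
    // lacks the required capability.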
3270 fn request_lsp<R: LspCommand>(
3271 &self,
3272 buffer_handle: ModelHandle<Buffer>,
3273 request: R,
3274 cx: &mut ModelContext<Self>,
3275 ) -> Task<Result<R::Response>>
3276 where
3277 <R::LspRequest as lsp::request::Request>::Result: Send,
3278 {
3279 let buffer = buffer_handle.read(cx);
3280 if self.is_local() {
3281 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3282 if let Some((file, (_, language_server))) =
3283 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3284 {
3285 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3286 return cx.spawn(|this, cx| async move {
3287 if !request.check_capabilities(&language_server.capabilities()) {
3288 return Ok(Default::default());
3289 }
3290
3291 let response = language_server
3292 .request::<R::LspRequest>(lsp_params)
3293 .await
3294 .context("lsp request failed")?;
3295 request
3296 .response_from_lsp(response, this, buffer_handle, cx)
3297 .await
3298 });
3299 }
3300 } else if let Some(project_id) = self.remote_id() {
3301 let rpc = self.client.clone();
3302 let message = request.to_proto(project_id, buffer);
3303 return cx.spawn(|this, cx| async move {
3304 let response = rpc.request(message).await?;
3305 request
3306 .response_from_proto(response, this, buffer_handle, cx)
3307 .await
3308 });
3309 }
3310 Task::ready(Ok(Default::default()))
3311 }
3312
3313 pub fn find_or_create_local_worktree(
3314 &mut self,
3315 abs_path: impl AsRef<Path>,
3316 visible: bool,
3317 cx: &mut ModelContext<Self>,
3318 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3319 let abs_path = abs_path.as_ref();
3320 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3321 Task::ready(Ok((tree.clone(), relative_path.into())))
3322 } else {
3323 let worktree = self.create_local_worktree(abs_path, visible, cx);
3324 cx.foreground()
3325 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3326 }
3327 }
3328
3329 pub fn find_local_worktree(
3330 &self,
3331 abs_path: &Path,
3332 cx: &AppContext,
3333 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3334 for tree in self.worktrees(cx) {
3335 if let Some(relative_path) = tree
3336 .read(cx)
3337 .as_local()
3338 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3339 {
3340 return Some((tree.clone(), relative_path.into()));
3341 }
3342 }
3343 None
3344 }
3345
3346 pub fn is_shared(&self) -> bool {
3347 match &self.client_state {
3348 ProjectClientState::Local { is_shared, .. } => *is_shared,
3349 ProjectClientState::Remote { .. } => false,
3350 }
3351 }
3352
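    // Creates a local worktree for the given path, deduplicating concurrent loads of the
    // same path. Once loaded, the worktree is registered with (or shared on) the server
    // if the project already has a remote id.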
3353 fn create_local_worktree(
3354 &mut self,
3355 abs_path: impl AsRef<Path>,
3356 visible: bool,
3357 cx: &mut ModelContext<Self>,
3358 ) -> Task<Result<ModelHandle<Worktree>>> {
3359 let fs = self.fs.clone();
3360 let client = self.client.clone();
3361 let next_entry_id = self.next_entry_id.clone();
3362 let path: Arc<Path> = abs_path.as_ref().into();
3363 let task = self
3364 .loading_local_worktrees
3365 .entry(path.clone())
3366 .or_insert_with(|| {
3367 cx.spawn(|project, mut cx| {
3368 async move {
3369 let worktree = Worktree::local(
3370 client.clone(),
3371 path.clone(),
3372 visible,
3373 fs,
3374 next_entry_id,
3375 &mut cx,
3376 )
3377 .await;
3378 project.update(&mut cx, |project, _| {
3379 project.loading_local_worktrees.remove(&path);
3380 });
3381 let worktree = worktree?;
3382
3383 let (remote_project_id, is_shared) =
3384 project.update(&mut cx, |project, cx| {
3385 project.add_worktree(&worktree, cx);
3386 (project.remote_id(), project.is_shared())
3387 });
3388
3389 if let Some(project_id) = remote_project_id {
3390 if is_shared {
3391 worktree
3392 .update(&mut cx, |worktree, cx| {
3393 worktree.as_local_mut().unwrap().share(project_id, cx)
3394 })
3395 .await?;
3396 } else {
3397 worktree
3398 .update(&mut cx, |worktree, cx| {
3399 worktree.as_local_mut().unwrap().register(project_id, cx)
3400 })
3401 .await?;
3402 }
3403 }
3404
3405 Ok(worktree)
3406 }
                    .map_err(Arc::new)
3408 })
3409 .shared()
3410 })
3411 .clone();
3412 cx.foreground().spawn(async move {
3413 match task.await {
3414 Ok(worktree) => Ok(worktree),
3415 Err(err) => Err(anyhow!("{}", err)),
3416 }
3417 })
3418 }
3419
3420 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3421 self.worktrees.retain(|worktree| {
3422 worktree
3423 .upgrade(cx)
3424 .map_or(false, |w| w.read(cx).id() != id)
3425 });
3426 cx.notify();
3427 }
3428
3429 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3430 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3431 if worktree.read(cx).is_local() {
3432 cx.subscribe(&worktree, |this, worktree, _, cx| {
3433 this.update_local_worktree_buffers(worktree, cx);
3434 })
3435 .detach();
3436 }
3437
3438 let push_strong_handle = {
3439 let worktree = worktree.read(cx);
3440 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3441 };
3442 if push_strong_handle {
3443 self.worktrees
3444 .push(WorktreeHandle::Strong(worktree.clone()));
3445 } else {
3446 cx.observe_release(&worktree, |this, _, cx| {
3447 this.worktrees
3448 .retain(|worktree| worktree.upgrade(cx).is_some());
3449 cx.notify();
3450 })
3451 .detach();
3452 self.worktrees
3453 .push(WorktreeHandle::Weak(worktree.downgrade()));
3454 }
3455 cx.notify();
3456 }
3457
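    // After a local worktree changes, re-resolves the `File` for every open buffer in that
    // worktree, notifies collaborators of the updated file, and re-registers buffers whose
    // absolute path changed with their language server under the new path.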
3458 fn update_local_worktree_buffers(
3459 &mut self,
3460 worktree_handle: ModelHandle<Worktree>,
3461 cx: &mut ModelContext<Self>,
3462 ) {
3463 let snapshot = worktree_handle.read(cx).snapshot();
3464 let mut buffers_to_delete = Vec::new();
3465 let mut renamed_buffers = Vec::new();
3466 for (buffer_id, buffer) in &self.opened_buffers {
3467 if let Some(buffer) = buffer.upgrade(cx) {
3468 buffer.update(cx, |buffer, cx| {
3469 if let Some(old_file) = File::from_dyn(buffer.file()) {
3470 if old_file.worktree != worktree_handle {
3471 return;
3472 }
3473
3474 let new_file = if let Some(entry) = old_file
3475 .entry_id
3476 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3477 {
3478 File {
3479 is_local: true,
3480 entry_id: Some(entry.id),
3481 mtime: entry.mtime,
3482 path: entry.path.clone(),
3483 worktree: worktree_handle.clone(),
3484 }
3485 } else if let Some(entry) =
3486 snapshot.entry_for_path(old_file.path().as_ref())
3487 {
3488 File {
3489 is_local: true,
3490 entry_id: Some(entry.id),
3491 mtime: entry.mtime,
3492 path: entry.path.clone(),
3493 worktree: worktree_handle.clone(),
3494 }
3495 } else {
3496 File {
3497 is_local: true,
3498 entry_id: None,
3499 path: old_file.path().clone(),
3500 mtime: old_file.mtime(),
3501 worktree: worktree_handle.clone(),
3502 }
3503 };
3504
3505 let old_path = old_file.abs_path(cx);
3506 if new_file.abs_path(cx) != old_path {
3507 renamed_buffers.push((cx.handle(), old_path));
3508 }
3509
3510 if let Some(project_id) = self.remote_id() {
3511 self.client
3512 .send(proto::UpdateBufferFile {
3513 project_id,
3514 buffer_id: *buffer_id as u64,
3515 file: Some(new_file.to_proto()),
3516 })
3517 .log_err();
3518 }
3519 buffer.file_updated(Box::new(new_file), cx).detach();
3520 }
3521 });
3522 } else {
3523 buffers_to_delete.push(*buffer_id);
3524 }
3525 }
3526
3527 for buffer_id in buffers_to_delete {
3528 self.opened_buffers.remove(&buffer_id);
3529 }
3530
3531 for (buffer, old_path) in renamed_buffers {
3532 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3533 self.assign_language_to_buffer(&buffer, cx);
3534 self.register_buffer_with_language_server(&buffer, cx);
3535 }
3536 }
3537
3538 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3539 let new_active_entry = entry.and_then(|project_path| {
3540 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3541 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3542 Some(entry.id)
3543 });
3544 if new_active_entry != self.active_entry {
3545 self.active_entry = new_active_entry;
3546 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3547 }
3548 }
3549
3550 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3551 self.language_server_statuses
3552 .values()
3553 .any(|status| status.pending_diagnostic_updates > 0)
3554 }
3555
3556 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3557 let mut summary = DiagnosticSummary::default();
3558 for (_, path_summary) in self.diagnostic_summaries(cx) {
3559 summary.error_count += path_summary.error_count;
3560 summary.warning_count += path_summary.warning_count;
3561 }
3562 summary
3563 }
3564
3565 pub fn diagnostic_summaries<'a>(
3566 &'a self,
3567 cx: &'a AppContext,
3568 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3569 self.worktrees(cx).flat_map(move |worktree| {
3570 let worktree = worktree.read(cx);
3571 let worktree_id = worktree.id();
3572 worktree
3573 .diagnostic_summaries()
3574 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3575 })
3576 }
3577
3578 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3579 if self
3580 .language_server_statuses
3581 .values()
3582 .map(|status| status.pending_diagnostic_updates)
3583 .sum::<isize>()
3584 == 1
3585 {
3586 cx.emit(Event::DiskBasedDiagnosticsStarted);
3587 }
3588 }
3589
3590 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3591 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3592 if self
3593 .language_server_statuses
3594 .values()
3595 .map(|status| status.pending_diagnostic_updates)
3596 .sum::<isize>()
3597 == 0
3598 {
3599 cx.emit(Event::DiskBasedDiagnosticsFinished);
3600 }
3601 }
3602
3603 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3604 self.active_entry
3605 }
3606
3607 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3608 self.worktree_for_id(path.worktree_id, cx)?
3609 .read(cx)
3610 .entry_for_path(&path.path)
3611 .map(|entry| entry.id)
3612 }
3613
3614 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3615 let worktree = self.worktree_for_entry(entry_id, cx)?;
3616 let worktree = worktree.read(cx);
3617 let worktree_id = worktree.id();
3618 let path = worktree.entry_for_id(entry_id)?.path.clone();
3619 Some(ProjectPath { worktree_id, path })
3620 }
3621
3622 // RPC message handlers
3623
3624 async fn handle_unshare_project(
3625 this: ModelHandle<Self>,
3626 _: TypedEnvelope<proto::UnshareProject>,
3627 _: Arc<Client>,
3628 mut cx: AsyncAppContext,
3629 ) -> Result<()> {
3630 this.update(&mut cx, |this, cx| this.project_unshared(cx));
3631 Ok(())
3632 }
3633
3634 async fn handle_add_collaborator(
3635 this: ModelHandle<Self>,
3636 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3637 _: Arc<Client>,
3638 mut cx: AsyncAppContext,
3639 ) -> Result<()> {
3640 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3641 let collaborator = envelope
3642 .payload
3643 .collaborator
3644 .take()
3645 .ok_or_else(|| anyhow!("empty collaborator"))?;
3646
3647 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3648 this.update(&mut cx, |this, cx| {
3649 this.collaborators
3650 .insert(collaborator.peer_id, collaborator);
3651 cx.notify();
3652 });
3653
3654 Ok(())
3655 }
3656
3657 async fn handle_remove_collaborator(
3658 this: ModelHandle<Self>,
3659 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3660 _: Arc<Client>,
3661 mut cx: AsyncAppContext,
3662 ) -> Result<()> {
3663 this.update(&mut cx, |this, cx| {
3664 let peer_id = PeerId(envelope.payload.peer_id);
3665 let replica_id = this
3666 .collaborators
3667 .remove(&peer_id)
3668 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3669 .replica_id;
3670 for (_, buffer) in &this.opened_buffers {
3671 if let Some(buffer) = buffer.upgrade(cx) {
3672 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3673 }
3674 }
3675 cx.emit(Event::CollaboratorLeft(peer_id));
3676 cx.notify();
3677 Ok(())
3678 })
3679 }
3680
3681 async fn handle_register_worktree(
3682 this: ModelHandle<Self>,
3683 envelope: TypedEnvelope<proto::RegisterWorktree>,
3684 client: Arc<Client>,
3685 mut cx: AsyncAppContext,
3686 ) -> Result<()> {
3687 this.update(&mut cx, |this, cx| {
3688 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3689 let replica_id = this.replica_id();
3690 let worktree = proto::Worktree {
3691 id: envelope.payload.worktree_id,
3692 root_name: envelope.payload.root_name,
3693 entries: Default::default(),
3694 diagnostic_summaries: Default::default(),
3695 visible: envelope.payload.visible,
3696 };
3697 let (worktree, load_task) =
3698 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3699 this.add_worktree(&worktree, cx);
3700 load_task.detach();
3701 Ok(())
3702 })
3703 }
3704
3705 async fn handle_unregister_worktree(
3706 this: ModelHandle<Self>,
3707 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3708 _: Arc<Client>,
3709 mut cx: AsyncAppContext,
3710 ) -> Result<()> {
3711 this.update(&mut cx, |this, cx| {
3712 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3713 this.remove_worktree(worktree_id, cx);
3714 Ok(())
3715 })
3716 }
3717
3718 async fn handle_update_worktree(
3719 this: ModelHandle<Self>,
3720 envelope: TypedEnvelope<proto::UpdateWorktree>,
3721 _: Arc<Client>,
3722 mut cx: AsyncAppContext,
3723 ) -> Result<()> {
3724 this.update(&mut cx, |this, cx| {
3725 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3726 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3727 worktree.update(cx, |worktree, _| {
3728 let worktree = worktree.as_remote_mut().unwrap();
3729 worktree.update_from_remote(envelope)
3730 })?;
3731 }
3732 Ok(())
3733 })
3734 }
3735
3736 async fn handle_update_diagnostic_summary(
3737 this: ModelHandle<Self>,
3738 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3739 _: Arc<Client>,
3740 mut cx: AsyncAppContext,
3741 ) -> Result<()> {
3742 this.update(&mut cx, |this, cx| {
3743 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3744 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3745 if let Some(summary) = envelope.payload.summary {
3746 let project_path = ProjectPath {
3747 worktree_id,
3748 path: Path::new(&summary.path).into(),
3749 };
3750 worktree.update(cx, |worktree, _| {
3751 worktree
3752 .as_remote_mut()
3753 .unwrap()
3754 .update_diagnostic_summary(project_path.path.clone(), &summary);
3755 });
3756 cx.emit(Event::DiagnosticsUpdated(project_path));
3757 }
3758 }
3759 Ok(())
3760 })
3761 }
3762
3763 async fn handle_start_language_server(
3764 this: ModelHandle<Self>,
3765 envelope: TypedEnvelope<proto::StartLanguageServer>,
3766 _: Arc<Client>,
3767 mut cx: AsyncAppContext,
3768 ) -> Result<()> {
3769 let server = envelope
3770 .payload
3771 .server
3772 .ok_or_else(|| anyhow!("invalid server"))?;
3773 this.update(&mut cx, |this, cx| {
3774 this.language_server_statuses.insert(
3775 server.id as usize,
3776 LanguageServerStatus {
3777 name: server.name,
3778 pending_work: Default::default(),
3779 pending_diagnostic_updates: 0,
3780 },
3781 );
3782 cx.notify();
3783 });
3784 Ok(())
3785 }
3786
3787 async fn handle_update_language_server(
3788 this: ModelHandle<Self>,
3789 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
3790 _: Arc<Client>,
3791 mut cx: AsyncAppContext,
3792 ) -> Result<()> {
3793 let language_server_id = envelope.payload.language_server_id as usize;
3794 match envelope
3795 .payload
3796 .variant
3797 .ok_or_else(|| anyhow!("invalid variant"))?
3798 {
3799 proto::update_language_server::Variant::WorkStart(payload) => {
3800 this.update(&mut cx, |this, cx| {
3801 this.on_lsp_work_start(language_server_id, payload.token, cx);
3802 })
3803 }
3804 proto::update_language_server::Variant::WorkProgress(payload) => {
3805 this.update(&mut cx, |this, cx| {
3806 this.on_lsp_work_progress(
3807 language_server_id,
3808 payload.token,
3809 LanguageServerProgress {
3810 message: payload.message,
3811 percentage: payload.percentage.map(|p| p as usize),
3812 last_update_at: Instant::now(),
3813 },
3814 cx,
3815 );
3816 })
3817 }
3818 proto::update_language_server::Variant::WorkEnd(payload) => {
3819 this.update(&mut cx, |this, cx| {
3820 this.on_lsp_work_end(language_server_id, payload.token, cx);
3821 })
3822 }
3823 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
3824 this.update(&mut cx, |this, cx| {
3825 this.disk_based_diagnostics_started(cx);
3826 })
3827 }
3828 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
3829 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3830 }
3831 }
3832
3833 Ok(())
3834 }
3835
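    // Applies buffer operations received from a peer. Operations that arrive before
    // the buffer finishes opening are queued on an `OpenBuffer::Loading` entry so
    // they can be applied once the buffer opens; operations for weakly-held buffers
    // are dropped.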
3836 async fn handle_update_buffer(
3837 this: ModelHandle<Self>,
3838 envelope: TypedEnvelope<proto::UpdateBuffer>,
3839 _: Arc<Client>,
3840 mut cx: AsyncAppContext,
3841 ) -> Result<()> {
3842 this.update(&mut cx, |this, cx| {
3843 let payload = envelope.payload.clone();
3844 let buffer_id = payload.buffer_id;
3845 let ops = payload
3846 .operations
3847 .into_iter()
3848                .map(language::proto::deserialize_operation)
3849 .collect::<Result<Vec<_>, _>>()?;
3850 match this.opened_buffers.entry(buffer_id) {
3851 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3852 OpenBuffer::Strong(buffer) => {
3853 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3854 }
3855 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3856 OpenBuffer::Weak(_) => {}
3857 },
3858 hash_map::Entry::Vacant(e) => {
3859 e.insert(OpenBuffer::Loading(ops));
3860 }
3861 }
3862 Ok(())
3863 })
3864 }
3865
3866 async fn handle_update_buffer_file(
3867 this: ModelHandle<Self>,
3868 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3869 _: Arc<Client>,
3870 mut cx: AsyncAppContext,
3871 ) -> Result<()> {
3872 this.update(&mut cx, |this, cx| {
3873 let payload = envelope.payload.clone();
3874 let buffer_id = payload.buffer_id;
3875 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3876 let worktree = this
3877 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3878 .ok_or_else(|| anyhow!("no such worktree"))?;
3879 let file = File::from_proto(file, worktree.clone(), cx)?;
3880 let buffer = this
3881 .opened_buffers
3882 .get_mut(&buffer_id)
3883 .and_then(|b| b.upgrade(cx))
3884 .ok_or_else(|| anyhow!("no such buffer"))?;
3885 buffer.update(cx, |buffer, cx| {
3886 buffer.file_updated(Box::new(file), cx).detach();
3887 });
3888 Ok(())
3889 })
3890 }
3891
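    // Saves a buffer on behalf of a peer: waits until the buffer has caught up to
    // the requested version, saves it, and replies with the saved version and mtime.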
3892 async fn handle_save_buffer(
3893 this: ModelHandle<Self>,
3894 envelope: TypedEnvelope<proto::SaveBuffer>,
3895 _: Arc<Client>,
3896 mut cx: AsyncAppContext,
3897 ) -> Result<proto::BufferSaved> {
3898 let buffer_id = envelope.payload.buffer_id;
3899 let requested_version = deserialize_version(envelope.payload.version);
3900
3901 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3902 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3903 let buffer = this
3904 .opened_buffers
3905 .get(&buffer_id)
3906 .and_then(|buffer| buffer.upgrade(cx))
3907 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3908 Ok::<_, anyhow::Error>((project_id, buffer))
3909 })?;
3910 buffer
3911 .update(&mut cx, |buffer, _| {
3912 buffer.wait_for_version(requested_version)
3913 })
3914 .await;
3915
3916 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3917 Ok(proto::BufferSaved {
3918 project_id,
3919 buffer_id,
3920 version: serialize_version(&saved_version),
3921 mtime: Some(mtime.into()),
3922 })
3923 }
3924
3925 async fn handle_reload_buffers(
3926 this: ModelHandle<Self>,
3927 envelope: TypedEnvelope<proto::ReloadBuffers>,
3928 _: Arc<Client>,
3929 mut cx: AsyncAppContext,
3930 ) -> Result<proto::ReloadBuffersResponse> {
3931 let sender_id = envelope.original_sender_id()?;
3932 let reload = this.update(&mut cx, |this, cx| {
3933 let mut buffers = HashSet::default();
3934 for buffer_id in &envelope.payload.buffer_ids {
3935 buffers.insert(
3936 this.opened_buffers
3937 .get(buffer_id)
3938 .and_then(|buffer| buffer.upgrade(cx))
3939 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3940 );
3941 }
3942 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
3943 })?;
3944
3945 let project_transaction = reload.await?;
3946 let project_transaction = this.update(&mut cx, |this, cx| {
3947 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3948 });
3949 Ok(proto::ReloadBuffersResponse {
3950 transaction: Some(project_transaction),
3951 })
3952 }
3953
3954 async fn handle_format_buffers(
3955 this: ModelHandle<Self>,
3956 envelope: TypedEnvelope<proto::FormatBuffers>,
3957 _: Arc<Client>,
3958 mut cx: AsyncAppContext,
3959 ) -> Result<proto::FormatBuffersResponse> {
3960 let sender_id = envelope.original_sender_id()?;
3961 let format = this.update(&mut cx, |this, cx| {
3962 let mut buffers = HashSet::default();
3963 for buffer_id in &envelope.payload.buffer_ids {
3964 buffers.insert(
3965 this.opened_buffers
3966 .get(buffer_id)
3967 .and_then(|buffer| buffer.upgrade(cx))
3968 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3969 );
3970 }
3971 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3972 })?;
3973
3974 let project_transaction = format.await?;
3975 let project_transaction = this.update(&mut cx, |this, cx| {
3976 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3977 });
3978 Ok(proto::FormatBuffersResponse {
3979 transaction: Some(project_transaction),
3980 })
3981 }
3982
3983 async fn handle_get_completions(
3984 this: ModelHandle<Self>,
3985 envelope: TypedEnvelope<proto::GetCompletions>,
3986 _: Arc<Client>,
3987 mut cx: AsyncAppContext,
3988 ) -> Result<proto::GetCompletionsResponse> {
3989 let position = envelope
3990 .payload
3991 .position
3992 .and_then(language::proto::deserialize_anchor)
3993 .ok_or_else(|| anyhow!("invalid position"))?;
3994 let version = deserialize_version(envelope.payload.version);
3995 let buffer = this.read_with(&cx, |this, cx| {
3996 this.opened_buffers
3997 .get(&envelope.payload.buffer_id)
3998 .and_then(|buffer| buffer.upgrade(cx))
3999 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4000 })?;
4001 buffer
4002 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4003 .await;
4004 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4005 let completions = this
4006 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4007 .await?;
4008
4009 Ok(proto::GetCompletionsResponse {
4010 completions: completions
4011 .iter()
4012 .map(language::proto::serialize_completion)
4013 .collect(),
4014 version: serialize_version(&version),
4015 })
4016 }
4017
4018 async fn handle_apply_additional_edits_for_completion(
4019 this: ModelHandle<Self>,
4020 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4021 _: Arc<Client>,
4022 mut cx: AsyncAppContext,
4023 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4024 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4025 let buffer = this
4026 .opened_buffers
4027 .get(&envelope.payload.buffer_id)
4028 .and_then(|buffer| buffer.upgrade(cx))
4029 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4030 let language = buffer.read(cx).language();
4031 let completion = language::proto::deserialize_completion(
4032 envelope
4033 .payload
4034 .completion
4035 .ok_or_else(|| anyhow!("invalid completion"))?,
4036 language,
4037 )?;
4038 Ok::<_, anyhow::Error>(
4039 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4040 )
4041 })?;
4042
4043 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4044 transaction: apply_additional_edits
4045 .await?
4046 .as_ref()
4047 .map(language::proto::serialize_transaction),
4048 })
4049 }
4050
4051 async fn handle_get_code_actions(
4052 this: ModelHandle<Self>,
4053 envelope: TypedEnvelope<proto::GetCodeActions>,
4054 _: Arc<Client>,
4055 mut cx: AsyncAppContext,
4056 ) -> Result<proto::GetCodeActionsResponse> {
4057 let start = envelope
4058 .payload
4059 .start
4060 .and_then(language::proto::deserialize_anchor)
4061 .ok_or_else(|| anyhow!("invalid start"))?;
4062 let end = envelope
4063 .payload
4064 .end
4065 .and_then(language::proto::deserialize_anchor)
4066 .ok_or_else(|| anyhow!("invalid end"))?;
4067 let buffer = this.update(&mut cx, |this, cx| {
4068 this.opened_buffers
4069 .get(&envelope.payload.buffer_id)
4070 .and_then(|buffer| buffer.upgrade(cx))
4071 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4072 })?;
4073 buffer
4074 .update(&mut cx, |buffer, _| {
4075 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4076 })
4077 .await;
4078
4079 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4080 let code_actions = this.update(&mut cx, |this, cx| {
4081 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4082 })?;
4083
4084 Ok(proto::GetCodeActionsResponse {
4085 actions: code_actions
4086 .await?
4087 .iter()
4088 .map(language::proto::serialize_code_action)
4089 .collect(),
4090 version: serialize_version(&version),
4091 })
4092 }
4093
4094 async fn handle_apply_code_action(
4095 this: ModelHandle<Self>,
4096 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4097 _: Arc<Client>,
4098 mut cx: AsyncAppContext,
4099 ) -> Result<proto::ApplyCodeActionResponse> {
4100 let sender_id = envelope.original_sender_id()?;
4101 let action = language::proto::deserialize_code_action(
4102 envelope
4103 .payload
4104 .action
4105 .ok_or_else(|| anyhow!("invalid action"))?,
4106 )?;
4107 let apply_code_action = this.update(&mut cx, |this, cx| {
4108 let buffer = this
4109 .opened_buffers
4110 .get(&envelope.payload.buffer_id)
4111 .and_then(|buffer| buffer.upgrade(cx))
4112 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4113 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4114 })?;
4115
4116 let project_transaction = apply_code_action.await?;
4117 let project_transaction = this.update(&mut cx, |this, cx| {
4118 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4119 });
4120 Ok(proto::ApplyCodeActionResponse {
4121 transaction: Some(project_transaction),
4122 })
4123 }
4124
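    // Generic handler for buffer-scoped LSP requests. The typed request is
    // deserialized from the proto payload, forwarded to the local language server
    // via `request_lsp`, and the response is serialized back for the original
    // sender along with the buffer version it was computed against.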
4125 async fn handle_lsp_command<T: LspCommand>(
4126 this: ModelHandle<Self>,
4127 envelope: TypedEnvelope<T::ProtoRequest>,
4128 _: Arc<Client>,
4129 mut cx: AsyncAppContext,
4130 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4131 where
4132 <T::LspRequest as lsp::request::Request>::Result: Send,
4133 {
4134 let sender_id = envelope.original_sender_id()?;
4135 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4136 let buffer_handle = this.read_with(&cx, |this, _| {
4137 this.opened_buffers
4138 .get(&buffer_id)
4139 .and_then(|buffer| buffer.upgrade(&cx))
4140 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4141 })?;
4142 let request = T::from_proto(
4143 envelope.payload,
4144 this.clone(),
4145 buffer_handle.clone(),
4146 cx.clone(),
4147 )
4148 .await?;
4149 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4150 let response = this
4151 .update(&mut cx, |this, cx| {
4152 this.request_lsp(buffer_handle, request, cx)
4153 })
4154 .await?;
4155 this.update(&mut cx, |this, cx| {
4156 Ok(T::response_to_proto(
4157 response,
4158 this,
4159 sender_id,
4160 &buffer_version,
4161 cx,
4162 ))
4163 })
4164 }
4165
4166 async fn handle_get_project_symbols(
4167 this: ModelHandle<Self>,
4168 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4169 _: Arc<Client>,
4170 mut cx: AsyncAppContext,
4171 ) -> Result<proto::GetProjectSymbolsResponse> {
4172 let symbols = this
4173 .update(&mut cx, |this, cx| {
4174 this.symbols(&envelope.payload.query, cx)
4175 })
4176 .await?;
4177
4178 Ok(proto::GetProjectSymbolsResponse {
4179 symbols: symbols.iter().map(serialize_symbol).collect(),
4180 })
4181 }
4182
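    // Performs a project-wide search on behalf of a peer, serializing each matching
    // buffer and anchor range into a `proto::Location`.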
4183 async fn handle_search_project(
4184 this: ModelHandle<Self>,
4185 envelope: TypedEnvelope<proto::SearchProject>,
4186 _: Arc<Client>,
4187 mut cx: AsyncAppContext,
4188 ) -> Result<proto::SearchProjectResponse> {
4189 let peer_id = envelope.original_sender_id()?;
4190 let query = SearchQuery::from_proto(envelope.payload)?;
4191 let result = this
4192 .update(&mut cx, |this, cx| this.search(query, cx))
4193 .await?;
4194
4195 this.update(&mut cx, |this, cx| {
4196 let mut locations = Vec::new();
4197 for (buffer, ranges) in result {
4198 for range in ranges {
4199 let start = serialize_anchor(&range.start);
4200 let end = serialize_anchor(&range.end);
4201 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4202 locations.push(proto::Location {
4203 buffer: Some(buffer),
4204 start: Some(start),
4205 end: Some(end),
4206 });
4207 }
4208 }
4209 Ok(proto::SearchProjectResponse { locations })
4210 })
4211 }
4212
4213 async fn handle_open_buffer_for_symbol(
4214 this: ModelHandle<Self>,
4215 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4216 _: Arc<Client>,
4217 mut cx: AsyncAppContext,
4218 ) -> Result<proto::OpenBufferForSymbolResponse> {
4219 let peer_id = envelope.original_sender_id()?;
4220 let symbol = envelope
4221 .payload
4222 .symbol
4223 .ok_or_else(|| anyhow!("invalid symbol"))?;
4224 let symbol = this.read_with(&cx, |this, _| {
4225 let symbol = this.deserialize_symbol(symbol)?;
4226 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4227 if signature == symbol.signature {
4228 Ok(symbol)
4229 } else {
4230 Err(anyhow!("invalid symbol signature"))
4231 }
4232 })?;
4233 let buffer = this
4234 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4235 .await?;
4236
4237 Ok(proto::OpenBufferForSymbolResponse {
4238 buffer: Some(this.update(&mut cx, |this, cx| {
4239 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4240 })),
4241 })
4242 }
4243
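    // Digest of the worktree id, symbol path, and this project's private nonce.
    // Symbols sent to peers carry this signature, and `handle_open_buffer_for_symbol`
    // rejects any symbol whose signature does not match.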
4244 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4245 let mut hasher = Sha256::new();
4246 hasher.update(worktree_id.to_proto().to_be_bytes());
4247 hasher.update(path.to_string_lossy().as_bytes());
4248 hasher.update(self.nonce.to_be_bytes());
4249 hasher.finalize().as_slice().try_into().unwrap()
4250 }
4251
4252 async fn handle_open_buffer_by_id(
4253 this: ModelHandle<Self>,
4254 envelope: TypedEnvelope<proto::OpenBufferById>,
4255 _: Arc<Client>,
4256 mut cx: AsyncAppContext,
4257 ) -> Result<proto::OpenBufferResponse> {
4258 let peer_id = envelope.original_sender_id()?;
4259 let buffer = this
4260 .update(&mut cx, |this, cx| {
4261 this.open_buffer_by_id(envelope.payload.id, cx)
4262 })
4263 .await?;
4264 this.update(&mut cx, |this, cx| {
4265 Ok(proto::OpenBufferResponse {
4266 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4267 })
4268 })
4269 }
4270
4271 async fn handle_open_buffer_by_path(
4272 this: ModelHandle<Self>,
4273 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4274 _: Arc<Client>,
4275 mut cx: AsyncAppContext,
4276 ) -> Result<proto::OpenBufferResponse> {
4277 let peer_id = envelope.original_sender_id()?;
4278 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4279 let open_buffer = this.update(&mut cx, |this, cx| {
4280 this.open_buffer(
4281 ProjectPath {
4282 worktree_id,
4283 path: PathBuf::from(envelope.payload.path).into(),
4284 },
4285 cx,
4286 )
4287 });
4288
4289 let buffer = open_buffer.await?;
4290 this.update(&mut cx, |this, cx| {
4291 Ok(proto::OpenBufferResponse {
4292 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4293 })
4294 })
4295 }
4296
4297 fn serialize_project_transaction_for_peer(
4298 &mut self,
4299 project_transaction: ProjectTransaction,
4300 peer_id: PeerId,
4301 cx: &AppContext,
4302 ) -> proto::ProjectTransaction {
4303 let mut serialized_transaction = proto::ProjectTransaction {
4304 buffers: Default::default(),
4305 transactions: Default::default(),
4306 };
4307 for (buffer, transaction) in project_transaction.0 {
4308 serialized_transaction
4309 .buffers
4310 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4311 serialized_transaction
4312 .transactions
4313 .push(language::proto::serialize_transaction(&transaction));
4314 }
4315 serialized_transaction
4316 }
4317
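    // Reconstructs a `ProjectTransaction` from its wire form, waiting until every
    // referenced edit has been applied to the corresponding buffer and optionally
    // pushing each transaction onto that buffer's undo history.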
4318 fn deserialize_project_transaction(
4319 &mut self,
4320 message: proto::ProjectTransaction,
4321 push_to_history: bool,
4322 cx: &mut ModelContext<Self>,
4323 ) -> Task<Result<ProjectTransaction>> {
4324 cx.spawn(|this, mut cx| async move {
4325 let mut project_transaction = ProjectTransaction::default();
4326 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4327 let buffer = this
4328 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4329 .await?;
4330 let transaction = language::proto::deserialize_transaction(transaction)?;
4331 project_transaction.0.insert(buffer, transaction);
4332 }
4333
4334 for (buffer, transaction) in &project_transaction.0 {
4335 buffer
4336 .update(&mut cx, |buffer, _| {
4337 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4338 })
4339 .await;
4340
4341 if push_to_history {
4342 buffer.update(&mut cx, |buffer, _| {
4343 buffer.push_transaction(transaction.clone(), Instant::now());
4344 });
4345 }
4346 }
4347
4348 Ok(project_transaction)
4349 })
4350 }
4351
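    // The first time a buffer is sent to a given peer, its full state is serialized;
    // afterwards only the buffer id is sent. `shared_buffers` tracks which buffers
    // each peer already holds.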
4352 fn serialize_buffer_for_peer(
4353 &mut self,
4354 buffer: &ModelHandle<Buffer>,
4355 peer_id: PeerId,
4356 cx: &AppContext,
4357 ) -> proto::Buffer {
4358 let buffer_id = buffer.read(cx).remote_id();
4359 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4360 if shared_buffers.insert(buffer_id) {
4361 proto::Buffer {
4362 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4363 }
4364 } else {
4365 proto::Buffer {
4366 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4367 }
4368 }
4369 }
4370
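    // Resolves a `proto::Buffer` into a local buffer handle. An id-only variant
    // waits on the `opened_buffer` channel until the referenced buffer has been
    // registered; a full-state variant constructs the buffer, attaches its file
    // (when the corresponding worktree exists), and registers it with the project.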
4371 fn deserialize_buffer(
4372 &mut self,
4373 buffer: proto::Buffer,
4374 cx: &mut ModelContext<Self>,
4375 ) -> Task<Result<ModelHandle<Buffer>>> {
4376 let replica_id = self.replica_id();
4377
4378 let opened_buffer_tx = self.opened_buffer.0.clone();
4379 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4380 cx.spawn(|this, mut cx| async move {
4381 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4382 proto::buffer::Variant::Id(id) => {
4383 let buffer = loop {
4384 let buffer = this.read_with(&cx, |this, cx| {
4385 this.opened_buffers
4386 .get(&id)
4387 .and_then(|buffer| buffer.upgrade(cx))
4388 });
4389 if let Some(buffer) = buffer {
4390 break buffer;
4391 }
4392 opened_buffer_rx
4393 .next()
4394 .await
4395 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4396 };
4397 Ok(buffer)
4398 }
4399 proto::buffer::Variant::State(mut buffer) => {
4400 let mut buffer_worktree = None;
4401 let mut buffer_file = None;
4402 if let Some(file) = buffer.file.take() {
4403 this.read_with(&cx, |this, cx| {
4404 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4405 let worktree =
4406 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4407 anyhow!("no worktree found for id {}", file.worktree_id)
4408 })?;
4409 buffer_file =
4410 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4411 as Box<dyn language::File>);
4412 buffer_worktree = Some(worktree);
4413 Ok::<_, anyhow::Error>(())
4414 })?;
4415 }
4416
4417 let buffer = cx.add_model(|cx| {
4418 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4419 });
4420
4421 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4422
4423 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4424 Ok(buffer)
4425 }
4426 }
4427 })
4428 }
4429
4430 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4431 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4432 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4433 let start = serialized_symbol
4434 .start
4435 .ok_or_else(|| anyhow!("invalid start"))?;
4436 let end = serialized_symbol
4437 .end
4438 .ok_or_else(|| anyhow!("invalid end"))?;
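        // The symbol kind is transmitted as a raw `lsp::SymbolKind` value; this
        // transmute assumes the peer sent a valid discriminant.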
4439 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4440 let path = PathBuf::from(serialized_symbol.path);
4441 let language = self.languages.select_language(&path);
4442 Ok(Symbol {
4443 source_worktree_id,
4444 worktree_id,
4445 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4446 label: language
4447 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4448 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4449 name: serialized_symbol.name,
4450 path,
4451 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4452 kind,
4453 signature: serialized_symbol
4454 .signature
4455 .try_into()
4456 .map_err(|_| anyhow!("invalid signature"))?,
4457 })
4458 }
4459
4460 async fn handle_buffer_saved(
4461 this: ModelHandle<Self>,
4462 envelope: TypedEnvelope<proto::BufferSaved>,
4463 _: Arc<Client>,
4464 mut cx: AsyncAppContext,
4465 ) -> Result<()> {
4466 let version = deserialize_version(envelope.payload.version);
4467 let mtime = envelope
4468 .payload
4469 .mtime
4470 .ok_or_else(|| anyhow!("missing mtime"))?
4471 .into();
4472
4473 this.update(&mut cx, |this, cx| {
4474 let buffer = this
4475 .opened_buffers
4476 .get(&envelope.payload.buffer_id)
4477 .and_then(|buffer| buffer.upgrade(cx));
4478 if let Some(buffer) = buffer {
4479 buffer.update(cx, |buffer, cx| {
4480 buffer.did_save(version, mtime, None, cx);
4481 });
4482 }
4483 Ok(())
4484 })
4485 }
4486
4487 async fn handle_buffer_reloaded(
4488 this: ModelHandle<Self>,
4489 envelope: TypedEnvelope<proto::BufferReloaded>,
4490 _: Arc<Client>,
4491 mut cx: AsyncAppContext,
4492 ) -> Result<()> {
4493 let payload = envelope.payload.clone();
4494 let version = deserialize_version(payload.version);
4495 let mtime = payload
4496 .mtime
4497 .ok_or_else(|| anyhow!("missing mtime"))?
4498 .into();
4499 this.update(&mut cx, |this, cx| {
4500 let buffer = this
4501 .opened_buffers
4502 .get(&payload.buffer_id)
4503 .and_then(|buffer| buffer.upgrade(cx));
4504 if let Some(buffer) = buffer {
4505 buffer.update(cx, |buffer, cx| {
4506 buffer.did_reload(version, mtime, cx);
4507 });
4508 }
4509 Ok(())
4510 })
4511 }
4512
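    // Fuzzy-matches `query` against the paths of all visible worktrees on the
    // background executor, prefixing matches with the worktree's root name whenever
    // more than one worktree is open.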
4513 pub fn match_paths<'a>(
4514 &self,
4515 query: &'a str,
4516 include_ignored: bool,
4517 smart_case: bool,
4518 max_results: usize,
4519 cancel_flag: &'a AtomicBool,
4520 cx: &AppContext,
4521 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4522 let worktrees = self
4523 .worktrees(cx)
4524 .filter(|worktree| worktree.read(cx).is_visible())
4525 .collect::<Vec<_>>();
4526 let include_root_name = worktrees.len() > 1;
4527 let candidate_sets = worktrees
4528 .into_iter()
4529 .map(|worktree| CandidateSet {
4530 snapshot: worktree.read(cx).snapshot(),
4531 include_ignored,
4532 include_root_name,
4533 })
4534 .collect::<Vec<_>>();
4535
4536 let background = cx.background().clone();
4537 async move {
4538 fuzzy::match_paths(
4539 candidate_sets.as_slice(),
4540 query,
4541 smart_case,
4542 max_results,
4543 cancel_flag,
4544 background,
4545 )
4546 .await
4547 }
4548 }
4549
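    // Converts LSP text edits into anchored buffer edits on a background thread,
    // merging adjacent or newline-separated edits and diffing multi-line
    // replacements so that anchors in unchanged regions keep their positions.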
4550 fn edits_from_lsp(
4551 &mut self,
4552 buffer: &ModelHandle<Buffer>,
4553 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4554 version: Option<i32>,
4555 cx: &mut ModelContext<Self>,
4556 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4557 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4558 cx.background().spawn(async move {
4559 let snapshot = snapshot?;
4560 let mut lsp_edits = lsp_edits
4561 .into_iter()
4562 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4563 .peekable();
4564
4565 let mut edits = Vec::new();
4566 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4567 // Combine any LSP edits that are adjacent.
4568 //
4569 // Also, combine LSP edits that are separated from each other by only
4570 // a newline. This is important because for some code actions,
4571                // rust-analyzer rewrites the entire buffer via a series of edits that
4572 // are separated by unchanged newline characters.
4573 //
4574 // In order for the diffing logic below to work properly, any edits that
4575 // cancel each other out must be combined into one.
4576 while let Some((next_range, next_text)) = lsp_edits.peek() {
4577 if next_range.start > range.end {
4578 if next_range.start.row > range.end.row + 1
4579 || next_range.start.column > 0
4580 || snapshot.clip_point_utf16(
4581 PointUtf16::new(range.end.row, u32::MAX),
4582 Bias::Left,
4583 ) > range.end
4584 {
4585 break;
4586 }
4587 new_text.push('\n');
4588 }
4589 range.end = next_range.end;
4590 new_text.push_str(&next_text);
4591 lsp_edits.next();
4592 }
4593
4594 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4595 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4596 {
4597 return Err(anyhow!("invalid edits received from language server"));
4598 }
4599
4600 // For multiline edits, perform a diff of the old and new text so that
4601 // we can identify the changes more precisely, preserving the locations
4602 // of any anchors positioned in the unchanged regions.
4603 if range.end.row > range.start.row {
4604 let mut offset = range.start.to_offset(&snapshot);
4605 let old_text = snapshot.text_for_range(range).collect::<String>();
4606
4607 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4608 let mut moved_since_edit = true;
4609 for change in diff.iter_all_changes() {
4610 let tag = change.tag();
4611 let value = change.value();
4612 match tag {
4613 ChangeTag::Equal => {
4614 offset += value.len();
4615 moved_since_edit = true;
4616 }
4617 ChangeTag::Delete => {
4618 let start = snapshot.anchor_after(offset);
4619 let end = snapshot.anchor_before(offset + value.len());
4620 if moved_since_edit {
4621 edits.push((start..end, String::new()));
4622 } else {
4623 edits.last_mut().unwrap().0.end = end;
4624 }
4625 offset += value.len();
4626 moved_since_edit = false;
4627 }
4628 ChangeTag::Insert => {
4629 if moved_since_edit {
4630 let anchor = snapshot.anchor_after(offset);
4631 edits.push((anchor.clone()..anchor, value.to_string()));
4632 } else {
4633 edits.last_mut().unwrap().1.push_str(value);
4634 }
4635 moved_since_edit = false;
4636 }
4637 }
4638 }
4639 } else if range.end == range.start {
4640 let anchor = snapshot.anchor_after(range.start);
4641 edits.push((anchor.clone()..anchor, new_text));
4642 } else {
4643 let edit_start = snapshot.anchor_after(range.start);
4644 let edit_end = snapshot.anchor_before(range.end);
4645 edits.push((edit_start..edit_end, new_text));
4646 }
4647 }
4648
4649 Ok(edits)
4650 })
4651 }
4652
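    // Returns the text snapshot corresponding to the document version an LSP edit
    // was computed against, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN`
    // versions old. When no version is given, the buffer's current text is used.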
4653 fn buffer_snapshot_for_lsp_version(
4654 &mut self,
4655 buffer: &ModelHandle<Buffer>,
4656 version: Option<i32>,
4657 cx: &AppContext,
4658 ) -> Result<TextBufferSnapshot> {
4659 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4660
4661 if let Some(version) = version {
4662 let buffer_id = buffer.read(cx).remote_id();
4663 let snapshots = self
4664 .buffer_snapshots
4665 .get_mut(&buffer_id)
4666 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4667 let mut found_snapshot = None;
4668 snapshots.retain(|(snapshot_version, snapshot)| {
4669 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4670 false
4671 } else {
4672 if *snapshot_version == version {
4673 found_snapshot = Some(snapshot.clone());
4674 }
4675 true
4676 }
4677 });
4678
4679 found_snapshot.ok_or_else(|| {
4680 anyhow!(
4681 "snapshot not found for buffer {} at version {}",
4682 buffer_id,
4683 version
4684 )
4685 })
4686 } else {
4687            Ok(buffer.read(cx).text_snapshot())
4688 }
4689 }
4690
4691 fn language_server_for_buffer(
4692 &self,
4693 buffer: &Buffer,
4694 cx: &AppContext,
4695 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4696 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4697 let worktree_id = file.worktree_id(cx);
4698 self.language_servers
4699 .get(&(worktree_id, language.lsp_adapter()?.name()))
4700 } else {
4701 None
4702 }
4703 }
4704}
4705
4706impl WorktreeHandle {
4707 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4708 match self {
4709 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4710 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4711 }
4712 }
4713}
4714
4715impl OpenBuffer {
4716 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4717 match self {
4718 OpenBuffer::Strong(handle) => Some(handle.clone()),
4719 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4720 OpenBuffer::Loading(_) => None,
4721 }
4722 }
4723}
4724
4725struct CandidateSet {
4726 snapshot: Snapshot,
4727 include_ignored: bool,
4728 include_root_name: bool,
4729}
4730
4731impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
4732 type Candidates = CandidateSetIter<'a>;
4733
4734 fn id(&self) -> usize {
4735 self.snapshot.id().to_usize()
4736 }
4737
4738 fn len(&self) -> usize {
4739 if self.include_ignored {
4740 self.snapshot.file_count()
4741 } else {
4742 self.snapshot.visible_file_count()
4743 }
4744 }
4745
4746 fn prefix(&self) -> Arc<str> {
4747 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
4748 self.snapshot.root_name().into()
4749 } else if self.include_root_name {
4750 format!("{}/", self.snapshot.root_name()).into()
4751 } else {
4752 "".into()
4753 }
4754 }
4755
4756 fn candidates(&'a self, start: usize) -> Self::Candidates {
4757 CandidateSetIter {
4758 traversal: self.snapshot.files(self.include_ignored, start),
4759 }
4760 }
4761}
4762
4763struct CandidateSetIter<'a> {
4764 traversal: Traversal<'a>,
4765}
4766
4767impl<'a> Iterator for CandidateSetIter<'a> {
4768 type Item = PathMatchCandidate<'a>;
4769
4770 fn next(&mut self) -> Option<Self::Item> {
4771 self.traversal.next().map(|entry| {
4772 if let EntryKind::File(char_bag) = entry.kind {
4773 PathMatchCandidate {
4774 path: &entry.path,
4775 char_bag,
4776 }
4777 } else {
4778 unreachable!()
4779 }
4780 })
4781 }
4782}
4783
4784impl Entity for Project {
4785 type Event = Event;
4786
4787 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4788 match &self.client_state {
4789 ProjectClientState::Local { remote_id_rx, .. } => {
4790 if let Some(project_id) = *remote_id_rx.borrow() {
4791 self.client
4792 .send(proto::UnregisterProject { project_id })
4793 .log_err();
4794 }
4795 }
4796 ProjectClientState::Remote { remote_id, .. } => {
4797 self.client
4798 .send(proto::LeaveProject {
4799 project_id: *remote_id,
4800 })
4801 .log_err();
4802 }
4803 }
4804 }
4805
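    // Before the app quits, ask every running language server to shut down and wait
    // for all of the shutdown futures to complete.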
4806 fn app_will_quit(
4807 &mut self,
4808 _: &mut MutableAppContext,
4809 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4810 let shutdown_futures = self
4811 .language_servers
4812 .drain()
4813 .filter_map(|(_, (_, server))| server.shutdown())
4814 .collect::<Vec<_>>();
4815 Some(
4816 async move {
4817 futures::future::join_all(shutdown_futures).await;
4818 }
4819 .boxed(),
4820 )
4821 }
4822}
4823
4824impl Collaborator {
4825 fn from_proto(
4826 message: proto::Collaborator,
4827 user_store: &ModelHandle<UserStore>,
4828 cx: &mut AsyncAppContext,
4829 ) -> impl Future<Output = Result<Self>> {
4830 let user = user_store.update(cx, |user_store, cx| {
4831 user_store.fetch_user(message.user_id, cx)
4832 });
4833
4834 async move {
4835 Ok(Self {
4836 peer_id: PeerId(message.peer_id),
4837 user: user.await?,
4838 replica_id: message.replica_id as ReplicaId,
4839 })
4840 }
4841 }
4842}
4843
4844impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4845 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4846 Self {
4847 worktree_id,
4848 path: path.as_ref().into(),
4849 }
4850 }
4851}
4852
4853impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4854 fn from(options: lsp::CreateFileOptions) -> Self {
4855 Self {
4856 overwrite: options.overwrite.unwrap_or(false),
4857 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4858 }
4859 }
4860}
4861
4862impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4863 fn from(options: lsp::RenameFileOptions) -> Self {
4864 Self {
4865 overwrite: options.overwrite.unwrap_or(false),
4866 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4867 }
4868 }
4869}
4870
4871impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4872 fn from(options: lsp::DeleteFileOptions) -> Self {
4873 Self {
4874 recursive: options.recursive.unwrap_or(false),
4875 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4876 }
4877 }
4878}
4879
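// Converts a symbol to its wire representation; `kind` is sent as the raw
// `lsp::SymbolKind` value (see the matching transmute in `deserialize_symbol`).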
4880fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4881 proto::Symbol {
4882 source_worktree_id: symbol.source_worktree_id.to_proto(),
4883 worktree_id: symbol.worktree_id.to_proto(),
4884 language_server_name: symbol.language_server_name.0.to_string(),
4885 name: symbol.name.clone(),
4886 kind: unsafe { mem::transmute(symbol.kind) },
4887 path: symbol.path.to_string_lossy().to_string(),
4888 start: Some(proto::Point {
4889 row: symbol.range.start.row,
4890 column: symbol.range.start.column,
4891 }),
4892 end: Some(proto::Point {
4893 row: symbol.range.end.row,
4894 column: symbol.range.end.column,
4895 }),
4896 signature: symbol.signature.to_vec(),
4897 }
4898}
4899
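// Computes `path` relative to `base`, emitting a `..` component for each remaining
// component of `base` that `path` does not share.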
4900fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4901 let mut path_components = path.components();
4902 let mut base_components = base.components();
4903 let mut components: Vec<Component> = Vec::new();
4904 loop {
4905 match (path_components.next(), base_components.next()) {
4906 (None, None) => break,
4907 (Some(a), None) => {
4908 components.push(a);
4909 components.extend(path_components.by_ref());
4910 break;
4911 }
4912 (None, _) => components.push(Component::ParentDir),
4913 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4914 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4915 (Some(a), Some(_)) => {
4916 components.push(Component::ParentDir);
4917 for _ in base_components {
4918 components.push(Component::ParentDir);
4919 }
4920 components.push(a);
4921 components.extend(path_components.by_ref());
4922 break;
4923 }
4924 }
4925 }
4926 components.iter().map(|c| c.as_os_str()).collect()
4927}
4928
4929impl Item for Buffer {
4930 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
4931 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
4932 }
4933}
4934
4935#[cfg(test)]
4936mod tests {
4937 use crate::worktree::WorktreeHandle;
4938
4939 use super::{Event, *};
4940 use fs::RealFs;
4941 use futures::{future, StreamExt};
4942 use gpui::test::subscribe;
4943 use language::{
4944 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
4945 OffsetRangeExt, Point, ToPoint,
4946 };
4947 use lsp::Url;
4948 use serde_json::json;
4949 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
4950 use unindent::Unindent as _;
4951 use util::{assert_set_eq, test::temp_tree};
4952
4953 #[gpui::test]
4954 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4955 let dir = temp_tree(json!({
4956 "root": {
4957 "apple": "",
4958 "banana": {
4959 "carrot": {
4960 "date": "",
4961 "endive": "",
4962 }
4963 },
4964 "fennel": {
4965 "grape": "",
4966 }
4967 }
4968 }));
4969
4970 let root_link_path = dir.path().join("root_link");
4971 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4972 unix::fs::symlink(
4973 &dir.path().join("root/fennel"),
4974 &dir.path().join("root/finnochio"),
4975 )
4976 .unwrap();
4977
4978 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
4979
4980 project.read_with(cx, |project, cx| {
4981 let tree = project.worktrees(cx).next().unwrap().read(cx);
4982 assert_eq!(tree.file_count(), 5);
4983 assert_eq!(
4984 tree.inode_for_path("fennel/grape"),
4985 tree.inode_for_path("finnochio/grape")
4986 );
4987 });
4988
4989 let cancel_flag = Default::default();
4990 let results = project
4991 .read_with(cx, |project, cx| {
4992 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4993 })
4994 .await;
4995 assert_eq!(
4996 results
4997 .into_iter()
4998 .map(|result| result.path)
4999 .collect::<Vec<Arc<Path>>>(),
5000 vec![
5001 PathBuf::from("banana/carrot/date").into(),
5002 PathBuf::from("banana/carrot/endive").into(),
5003 ]
5004 );
5005 }
5006
5007 #[gpui::test]
5008 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5009 cx.foreground().forbid_parking();
5010
5011 let mut rust_language = Language::new(
5012 LanguageConfig {
5013 name: "Rust".into(),
5014 path_suffixes: vec!["rs".to_string()],
5015 ..Default::default()
5016 },
5017 Some(tree_sitter_rust::language()),
5018 );
5019 let mut json_language = Language::new(
5020 LanguageConfig {
5021 name: "JSON".into(),
5022 path_suffixes: vec!["json".to_string()],
5023 ..Default::default()
5024 },
5025 None,
5026 );
5027 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5028 name: "the-rust-language-server",
5029 capabilities: lsp::ServerCapabilities {
5030 completion_provider: Some(lsp::CompletionOptions {
5031 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5032 ..Default::default()
5033 }),
5034 ..Default::default()
5035 },
5036 ..Default::default()
5037 });
5038 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5039 name: "the-json-language-server",
5040 capabilities: lsp::ServerCapabilities {
5041 completion_provider: Some(lsp::CompletionOptions {
5042 trigger_characters: Some(vec![":".to_string()]),
5043 ..Default::default()
5044 }),
5045 ..Default::default()
5046 },
5047 ..Default::default()
5048 });
5049
5050 let fs = FakeFs::new(cx.background());
5051 fs.insert_tree(
5052 "/the-root",
5053 json!({
5054 "test.rs": "const A: i32 = 1;",
5055 "test2.rs": "",
5056 "Cargo.toml": "a = 1",
5057 "package.json": "{\"a\": 1}",
5058 }),
5059 )
5060 .await;
5061
5062 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5063 project.update(cx, |project, _| {
5064 project.languages.add(Arc::new(rust_language));
5065 project.languages.add(Arc::new(json_language));
5066 });
5067
5068 // Open a buffer without an associated language server.
5069 let toml_buffer = project
5070 .update(cx, |project, cx| {
5071 project.open_local_buffer("/the-root/Cargo.toml", cx)
5072 })
5073 .await
5074 .unwrap();
5075
5076 // Open a buffer with an associated language server.
5077 let rust_buffer = project
5078 .update(cx, |project, cx| {
5079 project.open_local_buffer("/the-root/test.rs", cx)
5080 })
5081 .await
5082 .unwrap();
5083
5084 // A server is started up, and it is notified about Rust files.
5085 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5086 assert_eq!(
5087 fake_rust_server
5088 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5089 .await
5090 .text_document,
5091 lsp::TextDocumentItem {
5092 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5093 version: 0,
5094 text: "const A: i32 = 1;".to_string(),
5095 language_id: Default::default()
5096 }
5097 );
5098
5099 // The buffer is configured based on the language server's capabilities.
5100 rust_buffer.read_with(cx, |buffer, _| {
5101 assert_eq!(
5102 buffer.completion_triggers(),
5103 &[".".to_string(), "::".to_string()]
5104 );
5105 });
5106 toml_buffer.read_with(cx, |buffer, _| {
5107 assert!(buffer.completion_triggers().is_empty());
5108 });
5109
5110 // Edit a buffer. The changes are reported to the language server.
5111 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5112 assert_eq!(
5113 fake_rust_server
5114 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5115 .await
5116 .text_document,
5117 lsp::VersionedTextDocumentIdentifier::new(
5118 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5119 1
5120 )
5121 );
5122
5123 // Open a third buffer with a different associated language server.
5124 let json_buffer = project
5125 .update(cx, |project, cx| {
5126 project.open_local_buffer("/the-root/package.json", cx)
5127 })
5128 .await
5129 .unwrap();
5130
5131 // A json language server is started up and is only notified about the json buffer.
5132 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5133 assert_eq!(
5134 fake_json_server
5135 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5136 .await
5137 .text_document,
5138 lsp::TextDocumentItem {
5139 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5140 version: 0,
5141 text: "{\"a\": 1}".to_string(),
5142 language_id: Default::default()
5143 }
5144 );
5145
5146 // This buffer is configured based on the second language server's
5147 // capabilities.
5148 json_buffer.read_with(cx, |buffer, _| {
5149 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5150 });
5151
5152 // When opening another buffer whose language server is already running,
5153 // it is also configured based on the existing language server's capabilities.
5154 let rust_buffer2 = project
5155 .update(cx, |project, cx| {
5156 project.open_local_buffer("/the-root/test2.rs", cx)
5157 })
5158 .await
5159 .unwrap();
5160 rust_buffer2.read_with(cx, |buffer, _| {
5161 assert_eq!(
5162 buffer.completion_triggers(),
5163 &[".".to_string(), "::".to_string()]
5164 );
5165 });
5166
5167 // Changes are reported only to servers matching the buffer's language.
5168 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5169 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5170 assert_eq!(
5171 fake_rust_server
5172 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5173 .await
5174 .text_document,
5175 lsp::VersionedTextDocumentIdentifier::new(
5176 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5177 1
5178 )
5179 );
5180
5181 // Save notifications are reported to all servers.
5182 toml_buffer
5183 .update(cx, |buffer, cx| buffer.save(cx))
5184 .await
5185 .unwrap();
5186 assert_eq!(
5187 fake_rust_server
5188 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5189 .await
5190 .text_document,
5191 lsp::TextDocumentIdentifier::new(
5192 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5193 )
5194 );
5195 assert_eq!(
5196 fake_json_server
5197 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5198 .await
5199 .text_document,
5200 lsp::TextDocumentIdentifier::new(
5201 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5202 )
5203 );
5204
5205 // Renames are reported only to servers matching the buffer's language.
5206 fs.rename(
5207 Path::new("/the-root/test2.rs"),
5208 Path::new("/the-root/test3.rs"),
5209 Default::default(),
5210 )
5211 .await
5212 .unwrap();
5213 assert_eq!(
5214 fake_rust_server
5215 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5216 .await
5217 .text_document,
5218 lsp::TextDocumentIdentifier::new(
5219 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5220 ),
5221 );
5222 assert_eq!(
5223 fake_rust_server
5224 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5225 .await
5226 .text_document,
5227 lsp::TextDocumentItem {
5228 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5229 version: 0,
5230 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5231 language_id: Default::default()
5232 },
5233 );
5234
5235 rust_buffer2.update(cx, |buffer, cx| {
5236 buffer.update_diagnostics(
5237 DiagnosticSet::from_sorted_entries(
5238 vec![DiagnosticEntry {
5239 diagnostic: Default::default(),
5240 range: Anchor::MIN..Anchor::MAX,
5241 }],
5242 &buffer.snapshot(),
5243 ),
5244 cx,
5245 );
5246 assert_eq!(
5247 buffer
5248 .snapshot()
5249 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5250 .count(),
5251 1
5252 );
5253 });
5254
5255 // When the rename changes the extension of the file, the buffer gets closed on the old
5256 // language server and gets opened on the new one.
5257 fs.rename(
5258 Path::new("/the-root/test3.rs"),
5259 Path::new("/the-root/test3.json"),
5260 Default::default(),
5261 )
5262 .await
5263 .unwrap();
5264 assert_eq!(
5265 fake_rust_server
5266 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5267 .await
5268 .text_document,
5269 lsp::TextDocumentIdentifier::new(
5270 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5271 ),
5272 );
5273 assert_eq!(
5274 fake_json_server
5275 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5276 .await
5277 .text_document,
5278 lsp::TextDocumentItem {
5279 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5280 version: 0,
5281 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5282 language_id: Default::default()
5283 },
5284 );
5285 // We clear the diagnostics, since the language has changed.
5286 rust_buffer2.read_with(cx, |buffer, _| {
5287 assert_eq!(
5288 buffer
5289 .snapshot()
5290 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5291 .count(),
5292 0
5293 );
5294 });
5295
5296 // The renamed file's version resets after changing language server.
5297 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5298 assert_eq!(
5299 fake_json_server
5300 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5301 .await
5302 .text_document,
5303 lsp::VersionedTextDocumentIdentifier::new(
5304 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5305 1
5306 )
5307 );
5308
5309 // Restart language servers
5310 project.update(cx, |project, cx| {
5311 project.restart_language_servers_for_buffers(
5312 vec![rust_buffer.clone(), json_buffer.clone()],
5313 cx,
5314 );
5315 });
5316
5317 let mut rust_shutdown_requests = fake_rust_server
5318 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5319 let mut json_shutdown_requests = fake_json_server
5320 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5321 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5322
5323 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5324 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5325
5326        // Ensure the Rust document is reopened in the new Rust language server.
5327 assert_eq!(
5328 fake_rust_server
5329 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5330 .await
5331 .text_document,
5332 lsp::TextDocumentItem {
5333 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5334 version: 1,
5335 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5336 language_id: Default::default()
5337 }
5338 );
5339
5340        // Ensure the JSON documents are reopened in the new JSON language server.
5341 assert_set_eq!(
5342 [
5343 fake_json_server
5344 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5345 .await
5346 .text_document,
5347 fake_json_server
5348 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5349 .await
5350 .text_document,
5351 ],
5352 [
5353 lsp::TextDocumentItem {
5354 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5355 version: 0,
5356 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5357 language_id: Default::default()
5358 },
5359 lsp::TextDocumentItem {
5360 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5361 version: 1,
5362 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5363 language_id: Default::default()
5364 }
5365 ]
5366 );
5367
5368 // Close notifications are reported only to servers matching the buffer's language.
5369 cx.update(|_| drop(json_buffer));
5370 let close_message = lsp::DidCloseTextDocumentParams {
5371 text_document: lsp::TextDocumentIdentifier::new(
5372 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5373 ),
5374 };
5375 assert_eq!(
5376 fake_json_server
5377 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5378 .await,
5379 close_message,
5380 );
5381 }
5382
5383 #[gpui::test]
5384 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5385 cx.foreground().forbid_parking();
5386
5387 let fs = FakeFs::new(cx.background());
5388 fs.insert_tree(
5389 "/dir",
5390 json!({
5391 "a.rs": "let a = 1;",
5392 "b.rs": "let b = 2;"
5393 }),
5394 )
5395 .await;
5396
5397 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5398
5399 let buffer_a = project
5400 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5401 .await
5402 .unwrap();
5403 let buffer_b = project
5404 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5405 .await
5406 .unwrap();
5407
5408 project.update(cx, |project, cx| {
5409 project
5410 .update_diagnostics(
5411 lsp::PublishDiagnosticsParams {
5412 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5413 version: None,
5414 diagnostics: vec![lsp::Diagnostic {
5415 range: lsp::Range::new(
5416 lsp::Position::new(0, 4),
5417 lsp::Position::new(0, 5),
5418 ),
5419 severity: Some(lsp::DiagnosticSeverity::ERROR),
5420 message: "error 1".to_string(),
5421 ..Default::default()
5422 }],
5423 },
5424 &[],
5425 cx,
5426 )
5427 .unwrap();
5428 project
5429 .update_diagnostics(
5430 lsp::PublishDiagnosticsParams {
5431 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5432 version: None,
5433 diagnostics: vec![lsp::Diagnostic {
5434 range: lsp::Range::new(
5435 lsp::Position::new(0, 4),
5436 lsp::Position::new(0, 5),
5437 ),
5438 severity: Some(lsp::DiagnosticSeverity::WARNING),
5439 message: "error 2".to_string(),
5440 ..Default::default()
5441 }],
5442 },
5443 &[],
5444 cx,
5445 )
5446 .unwrap();
5447 });
5448
5449 buffer_a.read_with(cx, |buffer, _| {
5450 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5451 assert_eq!(
5452 chunks
5453 .iter()
5454 .map(|(s, d)| (s.as_str(), *d))
5455 .collect::<Vec<_>>(),
5456 &[
5457 ("let ", None),
5458 ("a", Some(DiagnosticSeverity::ERROR)),
5459 (" = 1;", None),
5460 ]
5461 );
5462 });
5463 buffer_b.read_with(cx, |buffer, _| {
5464 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5465 assert_eq!(
5466 chunks
5467 .iter()
5468 .map(|(s, d)| (s.as_str(), *d))
5469 .collect::<Vec<_>>(),
5470 &[
5471 ("let ", None),
5472 ("b", Some(DiagnosticSeverity::WARNING)),
5473 (" = 2;", None),
5474 ]
5475 );
5476 });
5477 }
5478
5479 #[gpui::test]
5480 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5481 cx.foreground().forbid_parking();
5482
5483 let progress_token = "the-progress-token";
5484 let mut language = Language::new(
5485 LanguageConfig {
5486 name: "Rust".into(),
5487 path_suffixes: vec!["rs".to_string()],
5488 ..Default::default()
5489 },
5490 Some(tree_sitter_rust::language()),
5491 );
5492 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5493 disk_based_diagnostics_progress_token: Some(progress_token),
5494 disk_based_diagnostics_sources: &["disk"],
5495 ..Default::default()
5496 });
5497
5498 let fs = FakeFs::new(cx.background());
5499 fs.insert_tree(
5500 "/dir",
5501 json!({
5502 "a.rs": "fn a() { A }",
5503 "b.rs": "const y: i32 = 1",
5504 }),
5505 )
5506 .await;
5507
5508 let project = Project::test(fs, ["/dir"], cx).await;
5509 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5510 let worktree_id =
5511 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5512
5513 // Cause worktree to start the fake language server
5514 let _buffer = project
5515 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5516 .await
5517 .unwrap();
5518
5519 let mut events = subscribe(&project, cx);
5520
5521 let mut fake_server = fake_servers.next().await.unwrap();
5522 fake_server.start_progress(progress_token).await;
5523 assert_eq!(
5524 events.next().await.unwrap(),
5525 Event::DiskBasedDiagnosticsStarted
5526 );
5527
5528 fake_server.start_progress(progress_token).await;
5529 fake_server.end_progress(progress_token).await;
5530 fake_server.start_progress(progress_token).await;
5531
5532 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5533 lsp::PublishDiagnosticsParams {
5534 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5535 version: None,
5536 diagnostics: vec![lsp::Diagnostic {
5537 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5538 severity: Some(lsp::DiagnosticSeverity::ERROR),
5539 message: "undefined variable 'A'".to_string(),
5540 ..Default::default()
5541 }],
5542 },
5543 );
5544 assert_eq!(
5545 events.next().await.unwrap(),
5546 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5547 );
5548
5549 fake_server.end_progress(progress_token).await;
5550 fake_server.end_progress(progress_token).await;
5551 assert_eq!(
5552 events.next().await.unwrap(),
5553 Event::DiskBasedDiagnosticsUpdated
5554 );
5555 assert_eq!(
5556 events.next().await.unwrap(),
5557 Event::DiskBasedDiagnosticsFinished
5558 );
5559
5560 let buffer = project
5561 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5562 .await
5563 .unwrap();
5564
5565 buffer.read_with(cx, |buffer, _| {
5566 let snapshot = buffer.snapshot();
5567 let diagnostics = snapshot
5568 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5569 .collect::<Vec<_>>();
5570 assert_eq!(
5571 diagnostics,
5572 &[DiagnosticEntry {
5573 range: Point::new(0, 9)..Point::new(0, 10),
5574 diagnostic: Diagnostic {
5575 severity: lsp::DiagnosticSeverity::ERROR,
5576 message: "undefined variable 'A'".to_string(),
5577 group_id: 0,
5578 is_primary: true,
5579 ..Default::default()
5580 }
5581 }]
5582 )
5583 });
5584
5585 // Ensure publishing empty diagnostics twice only results in one update event.
5586 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5587 lsp::PublishDiagnosticsParams {
5588 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5589 version: None,
5590 diagnostics: Default::default(),
5591 },
5592 );
5593 assert_eq!(
5594 events.next().await.unwrap(),
5595 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5596 );
5597
5598 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5599 lsp::PublishDiagnosticsParams {
5600 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5601 version: None,
5602 diagnostics: Default::default(),
5603 },
5604 );
5605 cx.foreground().run_until_parked();
5606 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5607 }
5608
5609 #[gpui::test]
5610 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5611 cx.foreground().forbid_parking();
5612
5613 let progress_token = "the-progress-token";
5614 let mut language = Language::new(
5615 LanguageConfig {
5616 path_suffixes: vec!["rs".to_string()],
5617 ..Default::default()
5618 },
5619 None,
5620 );
5621 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5622 disk_based_diagnostics_sources: &["disk"],
5623 disk_based_diagnostics_progress_token: Some(progress_token),
5624 ..Default::default()
5625 });
5626
5627 let fs = FakeFs::new(cx.background());
5628 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5629
5630 let project = Project::test(fs, ["/dir"], cx).await;
5631 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5632
5633 let buffer = project
5634 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5635 .await
5636 .unwrap();
5637
5638 // Simulate diagnostics starting to update.
5639 let mut fake_server = fake_servers.next().await.unwrap();
5640 fake_server.start_progress(progress_token).await;
5641
5642 // Restart the server before the diagnostics finish updating.
5643 project.update(cx, |project, cx| {
5644 project.restart_language_servers_for_buffers([buffer], cx);
5645 });
5646 let mut events = subscribe(&project, cx);
5647
5648 // Simulate the newly started server sending more diagnostics.
5649 let mut fake_server = fake_servers.next().await.unwrap();
5650 fake_server.start_progress(progress_token).await;
5651 assert_eq!(
5652 events.next().await.unwrap(),
5653 Event::DiskBasedDiagnosticsStarted
5654 );
5655
5656 // All diagnostics are considered done, despite the old server's diagnostic
5657 // task never completing.
5658 fake_server.end_progress(progress_token).await;
5659 assert_eq!(
5660 events.next().await.unwrap(),
5661 Event::DiskBasedDiagnosticsUpdated
5662 );
5663 assert_eq!(
5664 events.next().await.unwrap(),
5665 Event::DiskBasedDiagnosticsFinished
5666 );
5667 project.read_with(cx, |project, _| {
5668 assert!(!project.is_running_disk_based_diagnostics());
5669 });
5670 }
5671
5672 #[gpui::test]
5673 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5674 cx.foreground().forbid_parking();
5675
5676 let mut language = Language::new(
5677 LanguageConfig {
5678 name: "Rust".into(),
5679 path_suffixes: vec!["rs".to_string()],
5680 ..Default::default()
5681 },
5682 Some(tree_sitter_rust::language()),
5683 );
5684 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5685 disk_based_diagnostics_sources: &["disk"],
5686 ..Default::default()
5687 });
5688
5689 let text = "
5690 fn a() { A }
5691 fn b() { BB }
5692 fn c() { CCC }
5693 "
5694 .unindent();
5695
5696 let fs = FakeFs::new(cx.background());
5697 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5698
5699 let project = Project::test(fs, ["/dir"], cx).await;
5700 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5701
5702 let buffer = project
5703 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5704 .await
5705 .unwrap();
5706
5707 let mut fake_server = fake_servers.next().await.unwrap();
5708 let open_notification = fake_server
5709 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5710 .await;
5711
// Edit the buffer, moving its content down by two lines.
5713 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
5714 let change_notification_1 = fake_server
5715 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5716 .await;
5717 assert!(
5718 change_notification_1.text_document.version > open_notification.text_document.version
5719 );
5720
5721 // Report some diagnostics for the initial version of the buffer
5722 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5723 lsp::PublishDiagnosticsParams {
5724 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5725 version: Some(open_notification.text_document.version),
5726 diagnostics: vec![
5727 lsp::Diagnostic {
5728 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5729 severity: Some(DiagnosticSeverity::ERROR),
5730 message: "undefined variable 'A'".to_string(),
5731 source: Some("disk".to_string()),
5732 ..Default::default()
5733 },
5734 lsp::Diagnostic {
5735 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5736 severity: Some(DiagnosticSeverity::ERROR),
5737 message: "undefined variable 'BB'".to_string(),
5738 source: Some("disk".to_string()),
5739 ..Default::default()
5740 },
5741 lsp::Diagnostic {
5742 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
5743 severity: Some(DiagnosticSeverity::ERROR),
5744 source: Some("disk".to_string()),
5745 message: "undefined variable 'CCC'".to_string(),
5746 ..Default::default()
5747 },
5748 ],
5749 },
5750 );
5751
5752 // The diagnostics have moved down since they were created.
5753 buffer.next_notification(cx).await;
5754 buffer.read_with(cx, |buffer, _| {
5755 assert_eq!(
5756 buffer
5757 .snapshot()
5758 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
5759 .collect::<Vec<_>>(),
5760 &[
5761 DiagnosticEntry {
5762 range: Point::new(3, 9)..Point::new(3, 11),
5763 diagnostic: Diagnostic {
5764 severity: DiagnosticSeverity::ERROR,
5765 message: "undefined variable 'BB'".to_string(),
5766 is_disk_based: true,
5767 group_id: 1,
5768 is_primary: true,
5769 ..Default::default()
5770 },
5771 },
5772 DiagnosticEntry {
5773 range: Point::new(4, 9)..Point::new(4, 12),
5774 diagnostic: Diagnostic {
5775 severity: DiagnosticSeverity::ERROR,
5776 message: "undefined variable 'CCC'".to_string(),
5777 is_disk_based: true,
5778 group_id: 2,
5779 is_primary: true,
5780 ..Default::default()
5781 }
5782 }
5783 ]
5784 );
5785 assert_eq!(
5786 chunks_with_diagnostics(buffer, 0..buffer.len()),
5787 [
5788 ("\n\nfn a() { ".to_string(), None),
5789 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5790 (" }\nfn b() { ".to_string(), None),
5791 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
5792 (" }\nfn c() { ".to_string(), None),
5793 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
5794 (" }\n".to_string(), None),
5795 ]
5796 );
5797 assert_eq!(
5798 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
5799 [
5800 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
5801 (" }\nfn c() { ".to_string(), None),
5802 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
5803 ]
5804 );
5805 });
5806
5807 // Ensure overlapping diagnostics are highlighted correctly.
5808 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5809 lsp::PublishDiagnosticsParams {
5810 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5811 version: Some(open_notification.text_document.version),
5812 diagnostics: vec![
5813 lsp::Diagnostic {
5814 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5815 severity: Some(DiagnosticSeverity::ERROR),
5816 message: "undefined variable 'A'".to_string(),
5817 source: Some("disk".to_string()),
5818 ..Default::default()
5819 },
5820 lsp::Diagnostic {
5821 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
5822 severity: Some(DiagnosticSeverity::WARNING),
5823 message: "unreachable statement".to_string(),
5824 source: Some("disk".to_string()),
5825 ..Default::default()
5826 },
5827 ],
5828 },
5829 );
5830
5831 buffer.next_notification(cx).await;
5832 buffer.read_with(cx, |buffer, _| {
5833 assert_eq!(
5834 buffer
5835 .snapshot()
5836 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
5837 .collect::<Vec<_>>(),
5838 &[
5839 DiagnosticEntry {
5840 range: Point::new(2, 9)..Point::new(2, 12),
5841 diagnostic: Diagnostic {
5842 severity: DiagnosticSeverity::WARNING,
5843 message: "unreachable statement".to_string(),
5844 is_disk_based: true,
5845 group_id: 1,
5846 is_primary: true,
5847 ..Default::default()
5848 }
5849 },
5850 DiagnosticEntry {
5851 range: Point::new(2, 9)..Point::new(2, 10),
5852 diagnostic: Diagnostic {
5853 severity: DiagnosticSeverity::ERROR,
5854 message: "undefined variable 'A'".to_string(),
5855 is_disk_based: true,
5856 group_id: 0,
5857 is_primary: true,
5858 ..Default::default()
5859 },
5860 }
5861 ]
5862 );
5863 assert_eq!(
5864 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
5865 [
5866 ("fn a() { ".to_string(), None),
5867 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
5868 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5869 ("\n".to_string(), None),
5870 ]
5871 );
5872 assert_eq!(
5873 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
5874 [
5875 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
5876 ("\n".to_string(), None),
5877 ]
5878 );
5879 });
5880
5881 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
5882 // changes since the last save.
5883 buffer.update(cx, |buffer, cx| {
5884 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
5885 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
5886 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
5887 });
5888 let change_notification_2 = fake_server
5889 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5890 .await;
5891 assert!(
5892 change_notification_2.text_document.version
5893 > change_notification_1.text_document.version
5894 );
5895
5896 // Handle out-of-order diagnostics
5897 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5898 lsp::PublishDiagnosticsParams {
5899 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5900 version: Some(change_notification_2.text_document.version),
5901 diagnostics: vec![
5902 lsp::Diagnostic {
5903 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
5904 severity: Some(DiagnosticSeverity::ERROR),
5905 message: "undefined variable 'BB'".to_string(),
5906 source: Some("disk".to_string()),
5907 ..Default::default()
5908 },
5909 lsp::Diagnostic {
5910 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5911 severity: Some(DiagnosticSeverity::WARNING),
5912 message: "undefined variable 'A'".to_string(),
5913 source: Some("disk".to_string()),
5914 ..Default::default()
5915 },
5916 ],
5917 },
5918 );
5919
5920 buffer.next_notification(cx).await;
5921 buffer.read_with(cx, |buffer, _| {
5922 assert_eq!(
5923 buffer
5924 .snapshot()
5925 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5926 .collect::<Vec<_>>(),
5927 &[
5928 DiagnosticEntry {
5929 range: Point::new(2, 21)..Point::new(2, 22),
5930 diagnostic: Diagnostic {
5931 severity: DiagnosticSeverity::WARNING,
5932 message: "undefined variable 'A'".to_string(),
5933 is_disk_based: true,
5934 group_id: 1,
5935 is_primary: true,
5936 ..Default::default()
5937 }
5938 },
5939 DiagnosticEntry {
5940 range: Point::new(3, 9)..Point::new(3, 14),
5941 diagnostic: Diagnostic {
5942 severity: DiagnosticSeverity::ERROR,
5943 message: "undefined variable 'BB'".to_string(),
5944 is_disk_based: true,
5945 group_id: 0,
5946 is_primary: true,
5947 ..Default::default()
5948 },
5949 }
5950 ]
5951 );
5952 });
5953 }
5954
5955 #[gpui::test]
5956 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
5957 cx.foreground().forbid_parking();
5958
5959 let text = concat!(
5960 "let one = ;\n", //
5961 "let two = \n",
5962 "let three = 3;\n",
5963 );
5964
5965 let fs = FakeFs::new(cx.background());
5966 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5967
5968 let project = Project::test(fs, ["/dir"], cx).await;
5969 let buffer = project
5970 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5971 .await
5972 .unwrap();
5973
5974 project.update(cx, |project, cx| {
5975 project
5976 .update_buffer_diagnostics(
5977 &buffer,
5978 vec![
5979 DiagnosticEntry {
5980 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
5981 diagnostic: Diagnostic {
5982 severity: DiagnosticSeverity::ERROR,
5983 message: "syntax error 1".to_string(),
5984 ..Default::default()
5985 },
5986 },
5987 DiagnosticEntry {
5988 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
5989 diagnostic: Diagnostic {
5990 severity: DiagnosticSeverity::ERROR,
5991 message: "syntax error 2".to_string(),
5992 ..Default::default()
5993 },
5994 },
5995 ],
5996 None,
5997 cx,
5998 )
5999 .unwrap();
6000 });
6001
6002 // An empty range is extended forward to include the following character.
6003 // At the end of a line, an empty range is extended backward to include
6004 // the preceding character.
6005 buffer.read_with(cx, |buffer, _| {
6006 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6007 assert_eq!(
6008 chunks
6009 .iter()
6010 .map(|(s, d)| (s.as_str(), *d))
6011 .collect::<Vec<_>>(),
6012 &[
6013 ("let one = ", None),
6014 (";", Some(DiagnosticSeverity::ERROR)),
6015 ("\nlet two =", None),
6016 (" ", Some(DiagnosticSeverity::ERROR)),
6017 ("\nlet three = 3;\n", None)
6018 ]
6019 );
6020 });
6021 }
6022
6023 #[gpui::test]
6024 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6025 cx.foreground().forbid_parking();
6026
6027 let mut language = Language::new(
6028 LanguageConfig {
6029 name: "Rust".into(),
6030 path_suffixes: vec!["rs".to_string()],
6031 ..Default::default()
6032 },
6033 Some(tree_sitter_rust::language()),
6034 );
6035 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6036
6037 let text = "
6038 fn a() {
6039 f1();
6040 }
6041 fn b() {
6042 f2();
6043 }
6044 fn c() {
6045 f3();
6046 }
6047 "
6048 .unindent();
6049
6050 let fs = FakeFs::new(cx.background());
6051 fs.insert_tree(
6052 "/dir",
6053 json!({
6054 "a.rs": text.clone(),
6055 }),
6056 )
6057 .await;
6058
6059 let project = Project::test(fs, ["/dir"], cx).await;
6060 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6061 let buffer = project
6062 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6063 .await
6064 .unwrap();
6065
6066 let mut fake_server = fake_servers.next().await.unwrap();
6067 let lsp_document_version = fake_server
6068 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6069 .await
6070 .text_document
6071 .version;
6072
6073 // Simulate editing the buffer after the language server computes some edits.
6074 buffer.update(cx, |buffer, cx| {
6075 buffer.edit(
6076 [(
6077 Point::new(0, 0)..Point::new(0, 0),
6078 "// above first function\n",
6079 )],
6080 cx,
6081 );
6082 buffer.edit(
6083 [(
6084 Point::new(2, 0)..Point::new(2, 0),
6085 " // inside first function\n",
6086 )],
6087 cx,
6088 );
6089 buffer.edit(
6090 [(
6091 Point::new(6, 4)..Point::new(6, 4),
6092 "// inside second function ",
6093 )],
6094 cx,
6095 );
6096
6097 assert_eq!(
6098 buffer.text(),
6099 "
6100 // above first function
6101 fn a() {
6102 // inside first function
6103 f1();
6104 }
6105 fn b() {
6106 // inside second function f2();
6107 }
6108 fn c() {
6109 f3();
6110 }
6111 "
6112 .unindent()
6113 );
6114 });
6115
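// Convert the LSP edits, which were computed against the older document version,
// into edits against the buffer's current contents.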
6116 let edits = project
6117 .update(cx, |project, cx| {
6118 project.edits_from_lsp(
6119 &buffer,
6120 vec![
6121 // replace body of first function
6122 lsp::TextEdit {
6123 range: lsp::Range::new(
6124 lsp::Position::new(0, 0),
6125 lsp::Position::new(3, 0),
6126 ),
6127 new_text: "
6128 fn a() {
6129 f10();
6130 }
6131 "
6132 .unindent(),
6133 },
6134 // edit inside second function
6135 lsp::TextEdit {
6136 range: lsp::Range::new(
6137 lsp::Position::new(4, 6),
6138 lsp::Position::new(4, 6),
6139 ),
6140 new_text: "00".into(),
6141 },
6142 // edit inside third function via two distinct edits
6143 lsp::TextEdit {
6144 range: lsp::Range::new(
6145 lsp::Position::new(7, 5),
6146 lsp::Position::new(7, 5),
6147 ),
6148 new_text: "4000".into(),
6149 },
6150 lsp::TextEdit {
6151 range: lsp::Range::new(
6152 lsp::Position::new(7, 5),
6153 lsp::Position::new(7, 6),
6154 ),
6155 new_text: "".into(),
6156 },
6157 ],
6158 Some(lsp_document_version),
6159 cx,
6160 )
6161 })
6162 .await
6163 .unwrap();
6164
6165 buffer.update(cx, |buffer, cx| {
6166 for (range, new_text) in edits {
6167 buffer.edit([(range, new_text)], cx);
6168 }
6169 assert_eq!(
6170 buffer.text(),
6171 "
6172 // above first function
6173 fn a() {
6174 // inside first function
6175 f10();
6176 }
6177 fn b() {
6178 // inside second function f200();
6179 }
6180 fn c() {
6181 f4000();
6182 }
6183 "
6184 .unindent()
6185 );
6186 });
6187 }
6188
6189 #[gpui::test]
6190 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6191 cx.foreground().forbid_parking();
6192
6193 let text = "
6194 use a::b;
6195 use a::c;
6196
6197 fn f() {
6198 b();
6199 c();
6200 }
6201 "
6202 .unindent();
6203
6204 let fs = FakeFs::new(cx.background());
6205 fs.insert_tree(
6206 "/dir",
6207 json!({
6208 "a.rs": text.clone(),
6209 }),
6210 )
6211 .await;
6212
6213 let project = Project::test(fs, ["/dir"], cx).await;
6214 let buffer = project
6215 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6216 .await
6217 .unwrap();
6218
6219 // Simulate the language server sending us a small edit in the form of a very large diff.
6220 // Rust-analyzer does this when performing a merge-imports code action.
6221 let edits = project
6222 .update(cx, |project, cx| {
6223 project.edits_from_lsp(
6224 &buffer,
6225 [
6226 // Replace the first use statement without editing the semicolon.
6227 lsp::TextEdit {
6228 range: lsp::Range::new(
6229 lsp::Position::new(0, 4),
6230 lsp::Position::new(0, 8),
6231 ),
6232 new_text: "a::{b, c}".into(),
6233 },
6234 // Reinsert the remainder of the file between the semicolon and the final
6235 // newline of the file.
6236 lsp::TextEdit {
6237 range: lsp::Range::new(
6238 lsp::Position::new(0, 9),
6239 lsp::Position::new(0, 9),
6240 ),
6241 new_text: "\n\n".into(),
6242 },
6243 lsp::TextEdit {
6244 range: lsp::Range::new(
6245 lsp::Position::new(0, 9),
6246 lsp::Position::new(0, 9),
6247 ),
6248 new_text: "
6249 fn f() {
6250 b();
6251 c();
6252 }"
6253 .unindent(),
6254 },
6255 // Delete everything after the first newline of the file.
6256 lsp::TextEdit {
6257 range: lsp::Range::new(
6258 lsp::Position::new(1, 0),
6259 lsp::Position::new(7, 0),
6260 ),
6261 new_text: "".into(),
6262 },
6263 ],
6264 None,
6265 cx,
6266 )
6267 })
6268 .await
6269 .unwrap();
6270
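// The overlapping LSP edits should be minimized into two small buffer edits.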
6271 buffer.update(cx, |buffer, cx| {
6272 let edits = edits
6273 .into_iter()
6274 .map(|(range, text)| {
6275 (
6276 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6277 text,
6278 )
6279 })
6280 .collect::<Vec<_>>();
6281
6282 assert_eq!(
6283 edits,
6284 [
6285 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6286 (Point::new(1, 0)..Point::new(2, 0), "".into())
6287 ]
6288 );
6289
6290 for (range, new_text) in edits {
6291 buffer.edit([(range, new_text)], cx);
6292 }
6293 assert_eq!(
6294 buffer.text(),
6295 "
6296 use a::{b, c};
6297
6298 fn f() {
6299 b();
6300 c();
6301 }
6302 "
6303 .unindent()
6304 );
6305 });
6306 }
6307
6308 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6309 buffer: &Buffer,
6310 range: Range<T>,
6311 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6312 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6313 for chunk in buffer.snapshot().chunks(range, true) {
6314 if chunks.last().map_or(false, |prev_chunk| {
6315 prev_chunk.1 == chunk.diagnostic_severity
6316 }) {
6317 chunks.last_mut().unwrap().0.push_str(chunk.text);
6318 } else {
6319 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6320 }
6321 }
6322 chunks
6323 }
6324
6325 #[gpui::test]
6326 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6327 let dir = temp_tree(json!({
6328 "root": {
6329 "dir1": {},
6330 "dir2": {
6331 "dir3": {}
6332 }
6333 }
6334 }));
6335
6336 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6337 let cancel_flag = Default::default();
6338 let results = project
6339 .read_with(cx, |project, cx| {
6340 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6341 })
6342 .await;
6343
6344 assert!(results.is_empty());
6345 }
6346
6347 #[gpui::test]
6348 async fn test_definition(cx: &mut gpui::TestAppContext) {
6349 let mut language = Language::new(
6350 LanguageConfig {
6351 name: "Rust".into(),
6352 path_suffixes: vec!["rs".to_string()],
6353 ..Default::default()
6354 },
6355 Some(tree_sitter_rust::language()),
6356 );
6357 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6358
6359 let fs = FakeFs::new(cx.background());
6360 fs.insert_tree(
6361 "/dir",
6362 json!({
6363 "a.rs": "const fn a() { A }",
6364 "b.rs": "const y: i32 = crate::a()",
6365 }),
6366 )
6367 .await;
6368
6369 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6370 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6371
6372 let buffer = project
6373 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6374 .await
6375 .unwrap();
6376
6377 let fake_server = fake_servers.next().await.unwrap();
6378 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6379 let params = params.text_document_position_params;
6380 assert_eq!(
6381 params.text_document.uri.to_file_path().unwrap(),
6382 Path::new("/dir/b.rs"),
6383 );
6384 assert_eq!(params.position, lsp::Position::new(0, 22));
6385
6386 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6387 lsp::Location::new(
6388 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6389 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6390 ),
6391 )))
6392 });
6393
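// Request the definition of the symbol at offset 22, the reference to `a` in b.rs.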
6394 let mut definitions = project
6395 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6396 .await
6397 .unwrap();
6398
6399 assert_eq!(definitions.len(), 1);
6400 let definition = definitions.pop().unwrap();
6401 cx.update(|cx| {
6402 let target_buffer = definition.buffer.read(cx);
6403 assert_eq!(
6404 target_buffer
6405 .file()
6406 .unwrap()
6407 .as_local()
6408 .unwrap()
6409 .abs_path(cx),
6410 Path::new("/dir/a.rs"),
6411 );
6412 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6413 assert_eq!(
6414 list_worktrees(&project, cx),
6415 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6416 );
6417
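// Dropping the definition releases the only handle to the buffer in a.rs, so the
// invisible worktree that was created for it should be removed from the project.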
6418 drop(definition);
6419 });
6420 cx.read(|cx| {
6421 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6422 });
6423
6424 fn list_worktrees<'a>(
6425 project: &'a ModelHandle<Project>,
6426 cx: &'a AppContext,
6427 ) -> Vec<(&'a Path, bool)> {
6428 project
6429 .read(cx)
6430 .worktrees(cx)
6431 .map(|worktree| {
6432 let worktree = worktree.read(cx);
6433 (
6434 worktree.as_local().unwrap().abs_path().as_ref(),
6435 worktree.is_visible(),
6436 )
6437 })
6438 .collect::<Vec<_>>()
6439 }
6440 }
6441
6442 #[gpui::test]
6443 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6444 let mut language = Language::new(
6445 LanguageConfig {
6446 name: "TypeScript".into(),
6447 path_suffixes: vec!["ts".to_string()],
6448 ..Default::default()
6449 },
6450 Some(tree_sitter_typescript::language_typescript()),
6451 );
6452 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6453
6454 let fs = FakeFs::new(cx.background());
6455 fs.insert_tree(
6456 "/dir",
6457 json!({
6458 "a.ts": "",
6459 }),
6460 )
6461 .await;
6462
6463 let project = Project::test(fs, ["/dir"], cx).await;
6464 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6465 let buffer = project
6466 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6467 .await
6468 .unwrap();
6469
6470 let fake_server = fake_language_servers.next().await.unwrap();
6471
6472 let text = "let a = b.fqn";
6473 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6474 let completions = project.update(cx, |project, cx| {
6475 project.completions(&buffer, text.len(), cx)
6476 });
6477
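// Respond with a completion item that has no explicit edit range. The word
// preceding the cursor (`fqn`) should be used as the range to replace.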
6478 fake_server
6479 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6480 Ok(Some(lsp::CompletionResponse::Array(vec![
6481 lsp::CompletionItem {
6482 label: "fullyQualifiedName?".into(),
6483 insert_text: Some("fullyQualifiedName".into()),
6484 ..Default::default()
6485 },
6486 ])))
6487 })
6488 .next()
6489 .await;
6490 let completions = completions.await.unwrap();
6491 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6492 assert_eq!(completions.len(), 1);
6493 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6494 assert_eq!(
6495 completions[0].old_range.to_offset(&snapshot),
6496 text.len() - 3..text.len()
6497 );
6498 }
6499
6500 #[gpui::test(iterations = 10)]
6501 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6502 let mut language = Language::new(
6503 LanguageConfig {
6504 name: "TypeScript".into(),
6505 path_suffixes: vec!["ts".to_string()],
6506 ..Default::default()
6507 },
6508 None,
6509 );
6510 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6511
6512 let fs = FakeFs::new(cx.background());
6513 fs.insert_tree(
6514 "/dir",
6515 json!({
6516 "a.ts": "a",
6517 }),
6518 )
6519 .await;
6520
6521 let project = Project::test(fs, ["/dir"], cx).await;
6522 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6523 let buffer = project
6524 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6525 .await
6526 .unwrap();
6527
6528 let fake_server = fake_language_servers.next().await.unwrap();
6529
// The language server returns code actions that contain commands rather than edits.
6531 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6532 fake_server
6533 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6534 Ok(Some(vec![
6535 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6536 title: "The code action".into(),
6537 command: Some(lsp::Command {
6538 title: "The command".into(),
6539 command: "_the/command".into(),
6540 arguments: Some(vec![json!("the-argument")]),
6541 }),
6542 ..Default::default()
6543 }),
6544 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6545 title: "two".into(),
6546 ..Default::default()
6547 }),
6548 ]))
6549 })
6550 .next()
6551 .await;
6552
6553 let action = actions.await.unwrap()[0].clone();
6554 let apply = project.update(cx, |project, cx| {
6555 project.apply_code_action(buffer.clone(), action, true, cx)
6556 });
6557
// Resolving the code action does not populate its edits. In the absence of
// edits, we must execute the given command.
6560 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6561 |action, _| async move { Ok(action) },
6562 );
6563
// While executing the command, the language server sends the editor
// a `workspace/applyEdit` request.
6566 fake_server
6567 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6568 let fake = fake_server.clone();
6569 move |params, _| {
6570 assert_eq!(params.command, "_the/command");
6571 let fake = fake.clone();
6572 async move {
6573 fake.server
6574 .request::<lsp::request::ApplyWorkspaceEdit>(
6575 lsp::ApplyWorkspaceEditParams {
6576 label: None,
6577 edit: lsp::WorkspaceEdit {
6578 changes: Some(
6579 [(
6580 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6581 vec![lsp::TextEdit {
6582 range: lsp::Range::new(
6583 lsp::Position::new(0, 0),
6584 lsp::Position::new(0, 0),
6585 ),
6586 new_text: "X".into(),
6587 }],
6588 )]
6589 .into_iter()
6590 .collect(),
6591 ),
6592 ..Default::default()
6593 },
6594 },
6595 )
6596 .await
6597 .unwrap();
6598 Ok(Some(json!(null)))
6599 }
6600 }
6601 })
6602 .next()
6603 .await;
6604
// Applying the code action returns a project transaction containing the edits
// sent by the language server in its `workspace/applyEdit` request.
6607 let transaction = apply.await.unwrap();
6608 assert!(transaction.0.contains_key(&buffer));
6609 buffer.update(cx, |buffer, cx| {
6610 assert_eq!(buffer.text(), "Xa");
6611 buffer.undo(cx);
6612 assert_eq!(buffer.text(), "a");
6613 });
6614 }
6615
6616 #[gpui::test]
6617 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6618 let fs = FakeFs::new(cx.background());
6619 fs.insert_tree(
6620 "/dir",
6621 json!({
6622 "file1": "the old contents",
6623 }),
6624 )
6625 .await;
6626
6627 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6628 let buffer = project
6629 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6630 .await
6631 .unwrap();
6632 buffer
6633 .update(cx, |buffer, cx| {
6634 assert_eq!(buffer.text(), "the old contents");
6635 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6636 buffer.save(cx)
6637 })
6638 .await
6639 .unwrap();
6640
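// The file on disk should now contain the buffer's contents.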
6641 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6642 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6643 }
6644
6645 #[gpui::test]
6646 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6647 let fs = FakeFs::new(cx.background());
6648 fs.insert_tree(
6649 "/dir",
6650 json!({
6651 "file1": "the old contents",
6652 }),
6653 )
6654 .await;
6655
6656 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6657 let buffer = project
6658 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6659 .await
6660 .unwrap();
6661 buffer
6662 .update(cx, |buffer, cx| {
6663 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6664 buffer.save(cx)
6665 })
6666 .await
6667 .unwrap();
6668
6669 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6670 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6671 }
6672
6673 #[gpui::test]
6674 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6675 let fs = FakeFs::new(cx.background());
6676 fs.insert_tree("/dir", json!({})).await;
6677
6678 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6679 let buffer = project.update(cx, |project, cx| {
6680 project.create_buffer("", None, cx).unwrap()
6681 });
6682 buffer.update(cx, |buffer, cx| {
6683 buffer.edit([(0..0, "abc")], cx);
6684 assert!(buffer.is_dirty());
6685 assert!(!buffer.has_conflict());
6686 });
6687 project
6688 .update(cx, |project, cx| {
6689 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6690 })
6691 .await
6692 .unwrap();
6693 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6694 buffer.read_with(cx, |buffer, cx| {
6695 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6696 assert!(!buffer.is_dirty());
6697 assert!(!buffer.has_conflict());
6698 });
6699
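// Re-opening the path that the buffer was saved to returns the same buffer,
// rather than loading a duplicate.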
6700 let opened_buffer = project
6701 .update(cx, |project, cx| {
6702 project.open_local_buffer("/dir/file1", cx)
6703 })
6704 .await
6705 .unwrap();
6706 assert_eq!(opened_buffer, buffer);
6707 }
6708
6709 #[gpui::test(retries = 5)]
6710 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6711 let dir = temp_tree(json!({
6712 "a": {
6713 "file1": "",
6714 "file2": "",
6715 "file3": "",
6716 },
6717 "b": {
6718 "c": {
6719 "file4": "",
6720 "file5": "",
6721 }
6722 }
6723 }));
6724
6725 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6726 let rpc = project.read_with(cx, |p, _| p.client.clone());
6727
6728 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
6729 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
6730 async move { buffer.await.unwrap() }
6731 };
6732 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
6733 project.read_with(cx, |project, cx| {
6734 let tree = project.worktrees(cx).next().unwrap();
6735 tree.read(cx)
6736 .entry_for_path(path)
6737 .expect(&format!("no entry for path {}", path))
6738 .id
6739 })
6740 };
6741
6742 let buffer2 = buffer_for_path("a/file2", cx).await;
6743 let buffer3 = buffer_for_path("a/file3", cx).await;
6744 let buffer4 = buffer_for_path("b/c/file4", cx).await;
6745 let buffer5 = buffer_for_path("b/c/file5", cx).await;
6746
6747 let file2_id = id_for_path("a/file2", &cx);
6748 let file3_id = id_for_path("a/file3", &cx);
6749 let file4_id = id_for_path("b/c/file4", &cx);
6750
6751 // Create a remote copy of this worktree.
6752 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6753 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
6754 let (remote, load_task) = cx.update(|cx| {
6755 Worktree::remote(
6756 1,
6757 1,
6758 initial_snapshot.to_proto(&Default::default(), true),
6759 rpc.clone(),
6760 cx,
6761 )
6762 });
// Wait for the remote worktree to finish loading.
6764 load_task.await;
6765
6766 cx.read(|cx| {
6767 assert!(!buffer2.read(cx).is_dirty());
6768 assert!(!buffer3.read(cx).is_dirty());
6769 assert!(!buffer4.read(cx).is_dirty());
6770 assert!(!buffer5.read(cx).is_dirty());
6771 });
6772
6773 // Rename and delete files and directories.
6774 tree.flush_fs_events(&cx).await;
6775 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
6776 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
6777 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
6778 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
6779 tree.flush_fs_events(&cx).await;
6780
6781 let expected_paths = vec![
6782 "a",
6783 "a/file1",
6784 "a/file2.new",
6785 "b",
6786 "d",
6787 "d/file3",
6788 "d/file4",
6789 ];
6790
6791 cx.read(|app| {
6792 assert_eq!(
6793 tree.read(app)
6794 .paths()
6795 .map(|p| p.to_str().unwrap())
6796 .collect::<Vec<_>>(),
6797 expected_paths
6798 );
6799
6800 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
6801 assert_eq!(id_for_path("d/file3", &cx), file3_id);
6802 assert_eq!(id_for_path("d/file4", &cx), file4_id);
6803
6804 assert_eq!(
6805 buffer2.read(app).file().unwrap().path().as_ref(),
6806 Path::new("a/file2.new")
6807 );
6808 assert_eq!(
6809 buffer3.read(app).file().unwrap().path().as_ref(),
6810 Path::new("d/file3")
6811 );
6812 assert_eq!(
6813 buffer4.read(app).file().unwrap().path().as_ref(),
6814 Path::new("d/file4")
6815 );
6816 assert_eq!(
6817 buffer5.read(app).file().unwrap().path().as_ref(),
6818 Path::new("b/c/file5")
6819 );
6820
6821 assert!(!buffer2.read(app).file().unwrap().is_deleted());
6822 assert!(!buffer3.read(app).file().unwrap().is_deleted());
6823 assert!(!buffer4.read(app).file().unwrap().is_deleted());
6824 assert!(buffer5.read(app).file().unwrap().is_deleted());
6825 });
6826
6827 // Update the remote worktree. Check that it becomes consistent with the
6828 // local worktree.
6829 remote.update(cx, |remote, cx| {
6830 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
6831 &initial_snapshot,
6832 1,
6833 1,
6834 true,
6835 );
6836 remote
6837 .as_remote_mut()
6838 .unwrap()
6839 .snapshot
6840 .apply_remote_update(update_message)
6841 .unwrap();
6842
6843 assert_eq!(
6844 remote
6845 .paths()
6846 .map(|p| p.to_str().unwrap())
6847 .collect::<Vec<_>>(),
6848 expected_paths
6849 );
6850 });
6851 }
6852
6853 #[gpui::test]
6854 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
6855 let fs = FakeFs::new(cx.background());
6856 fs.insert_tree(
6857 "/dir",
6858 json!({
6859 "a.txt": "a-contents",
6860 "b.txt": "b-contents",
6861 }),
6862 )
6863 .await;
6864
6865 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6866
6867 // Spawn multiple tasks to open paths, repeating some paths.
6868 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6869 (
6870 p.open_local_buffer("/dir/a.txt", cx),
6871 p.open_local_buffer("/dir/b.txt", cx),
6872 p.open_local_buffer("/dir/a.txt", cx),
6873 )
6874 });
6875
6876 let buffer_a_1 = buffer_a_1.await.unwrap();
6877 let buffer_a_2 = buffer_a_2.await.unwrap();
6878 let buffer_b = buffer_b.await.unwrap();
6879 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
6880 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
6881
6882 // There is only one buffer per path.
6883 let buffer_a_id = buffer_a_1.id();
6884 assert_eq!(buffer_a_2.id(), buffer_a_id);
6885
6886 // Open the same path again while it is still open.
6887 drop(buffer_a_1);
6888 let buffer_a_3 = project
6889 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6890 .await
6891 .unwrap();
6892
6893 // There's still only one buffer per path.
6894 assert_eq!(buffer_a_3.id(), buffer_a_id);
6895 }
6896
6897 #[gpui::test]
6898 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6899 let fs = FakeFs::new(cx.background());
6900 fs.insert_tree(
6901 "/dir",
6902 json!({
6903 "file1": "abc",
6904 "file2": "def",
6905 "file3": "ghi",
6906 }),
6907 )
6908 .await;
6909
6910 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6911
6912 let buffer1 = project
6913 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6914 .await
6915 .unwrap();
6916 let events = Rc::new(RefCell::new(Vec::new()));
6917
6918 // initially, the buffer isn't dirty.
6919 buffer1.update(cx, |buffer, cx| {
6920 cx.subscribe(&buffer1, {
6921 let events = events.clone();
6922 move |_, _, event, _| match event {
6923 BufferEvent::Operation(_) => {}
6924 _ => events.borrow_mut().push(event.clone()),
6925 }
6926 })
6927 .detach();
6928
6929 assert!(!buffer.is_dirty());
6930 assert!(events.borrow().is_empty());
6931
6932 buffer.edit([(1..2, "")], cx);
6933 });
6934
6935 // after the first edit, the buffer is dirty, and emits a dirtied event.
6936 buffer1.update(cx, |buffer, cx| {
6937 assert!(buffer.text() == "ac");
6938 assert!(buffer.is_dirty());
6939 assert_eq!(
6940 *events.borrow(),
6941 &[language::Event::Edited, language::Event::Dirtied]
6942 );
6943 events.borrow_mut().clear();
6944 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
6945 });
6946
6947 // after saving, the buffer is not dirty, and emits a saved event.
6948 buffer1.update(cx, |buffer, cx| {
6949 assert!(!buffer.is_dirty());
6950 assert_eq!(*events.borrow(), &[language::Event::Saved]);
6951 events.borrow_mut().clear();
6952
6953 buffer.edit([(1..1, "B")], cx);
6954 buffer.edit([(2..2, "D")], cx);
6955 });
6956
6957 // after editing again, the buffer is dirty, and emits another dirty event.
6958 buffer1.update(cx, |buffer, cx| {
6959 assert!(buffer.text() == "aBDc");
6960 assert!(buffer.is_dirty());
6961 assert_eq!(
6962 *events.borrow(),
6963 &[
6964 language::Event::Edited,
6965 language::Event::Dirtied,
6966 language::Event::Edited,
6967 ],
6968 );
6969 events.borrow_mut().clear();
6970
// TODO - currently, after restoring the buffer to its
// previously-saved state, the buffer is still considered dirty.
6973 buffer.edit([(1..3, "")], cx);
6974 assert!(buffer.text() == "ac");
6975 assert!(buffer.is_dirty());
6976 });
6977
6978 assert_eq!(*events.borrow(), &[language::Event::Edited]);
6979
6980 // When a file is deleted, the buffer is considered dirty.
6981 let events = Rc::new(RefCell::new(Vec::new()));
6982 let buffer2 = project
6983 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
6984 .await
6985 .unwrap();
6986 buffer2.update(cx, |_, cx| {
6987 cx.subscribe(&buffer2, {
6988 let events = events.clone();
6989 move |_, _, event, _| events.borrow_mut().push(event.clone())
6990 })
6991 .detach();
6992 });
6993
6994 fs.remove_file("/dir/file2".as_ref(), Default::default())
6995 .await
6996 .unwrap();
6997 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
6998 assert_eq!(
6999 *events.borrow(),
7000 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7001 );
7002
7003 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7004 let events = Rc::new(RefCell::new(Vec::new()));
7005 let buffer3 = project
7006 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7007 .await
7008 .unwrap();
7009 buffer3.update(cx, |_, cx| {
7010 cx.subscribe(&buffer3, {
7011 let events = events.clone();
7012 move |_, _, event, _| events.borrow_mut().push(event.clone())
7013 })
7014 .detach();
7015 });
7016
7017 buffer3.update(cx, |buffer, cx| {
7018 buffer.edit([(0..0, "x")], cx);
7019 });
7020 events.borrow_mut().clear();
7021 fs.remove_file("/dir/file3".as_ref(), Default::default())
7022 .await
7023 .unwrap();
7024 buffer3
7025 .condition(&cx, |_, _| !events.borrow().is_empty())
7026 .await;
7027 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7028 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7029 }
7030
7031 #[gpui::test]
7032 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7033 let initial_contents = "aaa\nbbbbb\nc\n";
7034 let fs = FakeFs::new(cx.background());
7035 fs.insert_tree(
7036 "/dir",
7037 json!({
7038 "the-file": initial_contents,
7039 }),
7040 )
7041 .await;
7042 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7043 let buffer = project
7044 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7045 .await
7046 .unwrap();
7047
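// Place an anchor on each of the first three lines so we can verify how they
// are relocated when the file is reloaded from disk.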
7048 let anchors = (0..3)
7049 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7050 .collect::<Vec<_>>();
7051
7052 // Change the file on disk, adding two new lines of text, and removing
7053 // one line.
7054 buffer.read_with(cx, |buffer, _| {
7055 assert!(!buffer.is_dirty());
7056 assert!(!buffer.has_conflict());
7057 });
7058 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7059 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7060 .await
7061 .unwrap();
7062
7063 // Because the buffer was not modified, it is reloaded from disk. Its
7064 // contents are edited according to the diff between the old and new
7065 // file contents.
7066 buffer
7067 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7068 .await;
7069
7070 buffer.update(cx, |buffer, _| {
7071 assert_eq!(buffer.text(), new_contents);
7072 assert!(!buffer.is_dirty());
7073 assert!(!buffer.has_conflict());
7074
7075 let anchor_positions = anchors
7076 .iter()
7077 .map(|anchor| anchor.to_point(&*buffer))
7078 .collect::<Vec<_>>();
7079 assert_eq!(
7080 anchor_positions,
7081 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7082 );
7083 });
7084
7085 // Modify the buffer
7086 buffer.update(cx, |buffer, cx| {
7087 buffer.edit([(0..0, " ")], cx);
7088 assert!(buffer.is_dirty());
7089 assert!(!buffer.has_conflict());
7090 });
7091
7092 // Change the file on disk again, adding blank lines to the beginning.
7093 fs.save(
7094 "/dir/the-file".as_ref(),
7095 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7096 )
7097 .await
7098 .unwrap();
7099
7100 // Because the buffer is modified, it doesn't reload from disk, but is
7101 // marked as having a conflict.
7102 buffer
7103 .condition(&cx, |buffer, _| buffer.has_conflict())
7104 .await;
7105 }
7106
7107 #[gpui::test]
7108 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7109 cx.foreground().forbid_parking();
7110
7111 let fs = FakeFs::new(cx.background());
7112 fs.insert_tree(
7113 "/the-dir",
7114 json!({
7115 "a.rs": "
7116 fn foo(mut v: Vec<usize>) {
7117 for x in &v {
7118 v.push(1);
7119 }
7120 }
7121 "
7122 .unindent(),
7123 }),
7124 )
7125 .await;
7126
7127 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7128 let buffer = project
7129 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7130 .await
7131 .unwrap();
7132
7133 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7134 let message = lsp::PublishDiagnosticsParams {
7135 uri: buffer_uri.clone(),
7136 diagnostics: vec![
7137 lsp::Diagnostic {
7138 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7139 severity: Some(DiagnosticSeverity::WARNING),
7140 message: "error 1".to_string(),
7141 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7142 location: lsp::Location {
7143 uri: buffer_uri.clone(),
7144 range: lsp::Range::new(
7145 lsp::Position::new(1, 8),
7146 lsp::Position::new(1, 9),
7147 ),
7148 },
7149 message: "error 1 hint 1".to_string(),
7150 }]),
7151 ..Default::default()
7152 },
7153 lsp::Diagnostic {
7154 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7155 severity: Some(DiagnosticSeverity::HINT),
7156 message: "error 1 hint 1".to_string(),
7157 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7158 location: lsp::Location {
7159 uri: buffer_uri.clone(),
7160 range: lsp::Range::new(
7161 lsp::Position::new(1, 8),
7162 lsp::Position::new(1, 9),
7163 ),
7164 },
7165 message: "original diagnostic".to_string(),
7166 }]),
7167 ..Default::default()
7168 },
7169 lsp::Diagnostic {
7170 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7171 severity: Some(DiagnosticSeverity::ERROR),
7172 message: "error 2".to_string(),
7173 related_information: Some(vec![
7174 lsp::DiagnosticRelatedInformation {
7175 location: lsp::Location {
7176 uri: buffer_uri.clone(),
7177 range: lsp::Range::new(
7178 lsp::Position::new(1, 13),
7179 lsp::Position::new(1, 15),
7180 ),
7181 },
7182 message: "error 2 hint 1".to_string(),
7183 },
7184 lsp::DiagnosticRelatedInformation {
7185 location: lsp::Location {
7186 uri: buffer_uri.clone(),
7187 range: lsp::Range::new(
7188 lsp::Position::new(1, 13),
7189 lsp::Position::new(1, 15),
7190 ),
7191 },
7192 message: "error 2 hint 2".to_string(),
7193 },
7194 ]),
7195 ..Default::default()
7196 },
7197 lsp::Diagnostic {
7198 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7199 severity: Some(DiagnosticSeverity::HINT),
7200 message: "error 2 hint 1".to_string(),
7201 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7202 location: lsp::Location {
7203 uri: buffer_uri.clone(),
7204 range: lsp::Range::new(
7205 lsp::Position::new(2, 8),
7206 lsp::Position::new(2, 17),
7207 ),
7208 },
7209 message: "original diagnostic".to_string(),
7210 }]),
7211 ..Default::default()
7212 },
7213 lsp::Diagnostic {
7214 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7215 severity: Some(DiagnosticSeverity::HINT),
7216 message: "error 2 hint 2".to_string(),
7217 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7218 location: lsp::Location {
7219 uri: buffer_uri.clone(),
7220 range: lsp::Range::new(
7221 lsp::Position::new(2, 8),
7222 lsp::Position::new(2, 17),
7223 ),
7224 },
7225 message: "original diagnostic".to_string(),
7226 }]),
7227 ..Default::default()
7228 },
7229 ],
7230 version: None,
7231 };
7232
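// Send the diagnostics to the project and verify that related diagnostics are
// grouped with their primary entries.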
7233 project
7234 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7235 .unwrap();
7236 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7237
7238 assert_eq!(
7239 buffer
7240 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7241 .collect::<Vec<_>>(),
7242 &[
7243 DiagnosticEntry {
7244 range: Point::new(1, 8)..Point::new(1, 9),
7245 diagnostic: Diagnostic {
7246 severity: DiagnosticSeverity::WARNING,
7247 message: "error 1".to_string(),
7248 group_id: 0,
7249 is_primary: true,
7250 ..Default::default()
7251 }
7252 },
7253 DiagnosticEntry {
7254 range: Point::new(1, 8)..Point::new(1, 9),
7255 diagnostic: Diagnostic {
7256 severity: DiagnosticSeverity::HINT,
7257 message: "error 1 hint 1".to_string(),
7258 group_id: 0,
7259 is_primary: false,
7260 ..Default::default()
7261 }
7262 },
7263 DiagnosticEntry {
7264 range: Point::new(1, 13)..Point::new(1, 15),
7265 diagnostic: Diagnostic {
7266 severity: DiagnosticSeverity::HINT,
7267 message: "error 2 hint 1".to_string(),
7268 group_id: 1,
7269 is_primary: false,
7270 ..Default::default()
7271 }
7272 },
7273 DiagnosticEntry {
7274 range: Point::new(1, 13)..Point::new(1, 15),
7275 diagnostic: Diagnostic {
7276 severity: DiagnosticSeverity::HINT,
7277 message: "error 2 hint 2".to_string(),
7278 group_id: 1,
7279 is_primary: false,
7280 ..Default::default()
7281 }
7282 },
7283 DiagnosticEntry {
7284 range: Point::new(2, 8)..Point::new(2, 17),
7285 diagnostic: Diagnostic {
7286 severity: DiagnosticSeverity::ERROR,
7287 message: "error 2".to_string(),
7288 group_id: 1,
7289 is_primary: true,
7290 ..Default::default()
7291 }
7292 }
7293 ]
7294 );
7295
7296 assert_eq!(
7297 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7298 &[
7299 DiagnosticEntry {
7300 range: Point::new(1, 8)..Point::new(1, 9),
7301 diagnostic: Diagnostic {
7302 severity: DiagnosticSeverity::WARNING,
7303 message: "error 1".to_string(),
7304 group_id: 0,
7305 is_primary: true,
7306 ..Default::default()
7307 }
7308 },
7309 DiagnosticEntry {
7310 range: Point::new(1, 8)..Point::new(1, 9),
7311 diagnostic: Diagnostic {
7312 severity: DiagnosticSeverity::HINT,
7313 message: "error 1 hint 1".to_string(),
7314 group_id: 0,
7315 is_primary: false,
7316 ..Default::default()
7317 }
7318 },
7319 ]
7320 );
7321 assert_eq!(
7322 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7323 &[
7324 DiagnosticEntry {
7325 range: Point::new(1, 13)..Point::new(1, 15),
7326 diagnostic: Diagnostic {
7327 severity: DiagnosticSeverity::HINT,
7328 message: "error 2 hint 1".to_string(),
7329 group_id: 1,
7330 is_primary: false,
7331 ..Default::default()
7332 }
7333 },
7334 DiagnosticEntry {
7335 range: Point::new(1, 13)..Point::new(1, 15),
7336 diagnostic: Diagnostic {
7337 severity: DiagnosticSeverity::HINT,
7338 message: "error 2 hint 2".to_string(),
7339 group_id: 1,
7340 is_primary: false,
7341 ..Default::default()
7342 }
7343 },
7344 DiagnosticEntry {
7345 range: Point::new(2, 8)..Point::new(2, 17),
7346 diagnostic: Diagnostic {
7347 severity: DiagnosticSeverity::ERROR,
7348 message: "error 2".to_string(),
7349 group_id: 1,
7350 is_primary: true,
7351 ..Default::default()
7352 }
7353 }
7354 ]
7355 );
7356 }
7357
7358 #[gpui::test]
7359 async fn test_rename(cx: &mut gpui::TestAppContext) {
7360 cx.foreground().forbid_parking();
7361
7362 let mut language = Language::new(
7363 LanguageConfig {
7364 name: "Rust".into(),
7365 path_suffixes: vec!["rs".to_string()],
7366 ..Default::default()
7367 },
7368 Some(tree_sitter_rust::language()),
7369 );
7370 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7371 capabilities: lsp::ServerCapabilities {
7372 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7373 prepare_provider: Some(true),
7374 work_done_progress_options: Default::default(),
7375 })),
7376 ..Default::default()
7377 },
7378 ..Default::default()
7379 });
7380
7381 let fs = FakeFs::new(cx.background());
7382 fs.insert_tree(
7383 "/dir",
7384 json!({
7385 "one.rs": "const ONE: usize = 1;",
7386 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7387 }),
7388 )
7389 .await;
7390
7391 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7392 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7393 let buffer = project
7394 .update(cx, |project, cx| {
7395 project.open_local_buffer("/dir/one.rs", cx)
7396 })
7397 .await
7398 .unwrap();
7399
7400 let fake_server = fake_servers.next().await.unwrap();
7401
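// Prepare to rename the symbol at offset 7, which falls inside `ONE` in one.rs.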
7402 let response = project.update(cx, |project, cx| {
7403 project.prepare_rename(buffer.clone(), 7, cx)
7404 });
7405 fake_server
7406 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
7407 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
7408 assert_eq!(params.position, lsp::Position::new(0, 7));
7409 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
7410 lsp::Position::new(0, 6),
7411 lsp::Position::new(0, 9),
7412 ))))
7413 })
7414 .next()
7415 .await
7416 .unwrap();
7417 let range = response.await.unwrap().unwrap();
7418 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
7419 assert_eq!(range, 6..9);
7420
7421 let response = project.update(cx, |project, cx| {
7422 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
7423 });
7424 fake_server
7425 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
7426 assert_eq!(
7427 params.text_document_position.text_document.uri.as_str(),
7428 "file:///dir/one.rs"
7429 );
7430 assert_eq!(
7431 params.text_document_position.position,
7432 lsp::Position::new(0, 7)
7433 );
7434 assert_eq!(params.new_name, "THREE");
7435 Ok(Some(lsp::WorkspaceEdit {
7436 changes: Some(
7437 [
7438 (
7439 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
7440 vec![lsp::TextEdit::new(
7441 lsp::Range::new(
7442 lsp::Position::new(0, 6),
7443 lsp::Position::new(0, 9),
7444 ),
7445 "THREE".to_string(),
7446 )],
7447 ),
7448 (
7449 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
7450 vec![
7451 lsp::TextEdit::new(
7452 lsp::Range::new(
7453 lsp::Position::new(0, 24),
7454 lsp::Position::new(0, 27),
7455 ),
7456 "THREE".to_string(),
7457 ),
7458 lsp::TextEdit::new(
7459 lsp::Range::new(
7460 lsp::Position::new(0, 35),
7461 lsp::Position::new(0, 38),
7462 ),
7463 "THREE".to_string(),
7464 ),
7465 ],
7466 ),
7467 ]
7468 .into_iter()
7469 .collect(),
7470 ),
7471 ..Default::default()
7472 }))
7473 })
7474 .next()
7475 .await
7476 .unwrap();
7477 let mut transaction = response.await.unwrap().0;
7478 assert_eq!(transaction.len(), 2);
7479 assert_eq!(
7480 transaction
7481 .remove_entry(&buffer)
7482 .unwrap()
7483 .0
7484 .read_with(cx, |buffer, _| buffer.text()),
7485 "const THREE: usize = 1;"
7486 );
7487 assert_eq!(
7488 transaction
7489 .into_keys()
7490 .next()
7491 .unwrap()
7492 .read_with(cx, |buffer, _| buffer.text()),
7493 "const TWO: usize = one::THREE + one::THREE;"
7494 );
7495 }
7496
7497 #[gpui::test]
7498 async fn test_search(cx: &mut gpui::TestAppContext) {
7499 let fs = FakeFs::new(cx.background());
7500 fs.insert_tree(
7501 "/dir",
7502 json!({
7503 "one.rs": "const ONE: usize = 1;",
7504 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7505 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7506 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7507 }),
7508 )
7509 .await;
7510 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7511 assert_eq!(
7512 search(&project, SearchQuery::text("TWO", false, true), cx)
7513 .await
7514 .unwrap(),
7515 HashMap::from_iter([
7516 ("two.rs".to_string(), vec![6..9]),
7517 ("three.rs".to_string(), vec![37..40])
7518 ])
7519 );
7520
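// Open one of the files and edit it without saving. Subsequent searches should
// reflect the buffer's in-memory contents rather than the file on disk.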
7521 let buffer_4 = project
7522 .update(cx, |project, cx| {
7523 project.open_local_buffer("/dir/four.rs", cx)
7524 })
7525 .await
7526 .unwrap();
7527 buffer_4.update(cx, |buffer, cx| {
7528 let text = "two::TWO";
7529 buffer.edit([(20..28, text), (31..43, text)], cx);
7530 });
7531
7532 assert_eq!(
7533 search(&project, SearchQuery::text("TWO", false, true), cx)
7534 .await
7535 .unwrap(),
7536 HashMap::from_iter([
7537 ("two.rs".to_string(), vec![6..9]),
7538 ("three.rs".to_string(), vec![37..40]),
7539 ("four.rs".to_string(), vec![25..28, 36..39])
7540 ])
7541 );
7542
7543 async fn search(
7544 project: &ModelHandle<Project>,
7545 query: SearchQuery,
7546 cx: &mut gpui::TestAppContext,
7547 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
7548 let results = project
7549 .update(cx, |project, cx| project.search(query, cx))
7550 .await?;
7551
7552 Ok(results
7553 .into_iter()
7554 .map(|(buffer, ranges)| {
7555 buffer.read_with(cx, |buffer, _| {
7556 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
7557 let ranges = ranges
7558 .into_iter()
7559 .map(|range| range.to_offset(buffer))
7560 .collect::<Vec<_>>();
7561 (path, ranges)
7562 })
7563 })
7564 .collect())
7565 }
7566 }
7567}